[Binary artifact: tar archive of var/home/core/zuul-output/ containing logs/kubelet.log.gz, a gzip-compressed kubelet log. The compressed contents are binary and not recoverable as text.]
}X`,kL+#{Β9|˒y,VԘQ'>|B1_~`=4=]ϻ?~̭d_cE=`(ZUXL~Xa2<̤?%g 22a)$(U5` YF{WNQ :]Δ5Eyg6RʆJ,U)+Rnm Ԅ&Xu&Κ;!{Nm'MrBeʗ'f!% dD rH.[ ¡e (x?YJT1C`#qJ3txԂI9ХCsY_ɴs8Ppm_: '+\[߫eCwӷ:y֧V>ry'gU{ 7?Ne>Te[2*eS)*mcPdޕC7ċOEKГcQ4@AjhdL5R*a`!j10; 껝}xԋuӶܿ>66↎ǯV|dĎQό `&ԂbI'0r%[)0Tg:k]E Br + uJEDJ;L'銋/=vlrX^>vcGlx@KD &S`r"N6=qx(՞_Ύ&eG1ףTLĹ{Ύkx_q0"6D3%d-A&_#|: dP'Dg@hS!v aI6uR'T )D܄fir qĹy[^:ҒMqbŻ4#̈,yC.h( V.clgaqYT wiǦx 8]!Yѱ,k?TEEF\~|GcVK76)oomeǧ0@d.d:/.hEB1&$i{'-@svF$ؤEGcDU;P]HgIRa*úA6.dRV+80 @NQz,šK~Ș8YYɦm6&_n'|;<eҳ p^[/޸)\} XcFiw04VqXu4V)ѶS_)R*0+WIy(pUzJiTWV*  \Uq;Z*%W/4V+WUWC*-}*opK/! ,]Uq?:pU-v"{.\1m?F׷[_wl- 7O0izzv϶J6tꥲ l: \Ui-;\U) ^\vj<=Ӯe=f|U-b~ʿiy󷫒;O8OfV1Qx|spCٙhwtYŏC!P5:0":5#eIB*)*|g^WO;{e Ҟ( $$فb@LVxl#mBKYQyx+Ol\+ -ms#ƍTXLwRD_8$׋I5!/8 {y1eG]ۼ9<3Ƃt\6w{{@կ[J-$`&Z9)k 7,3}z/q&߯l H6*=4Fe =u-@]@; zH(#j:V-vBWTVh:t'{E&2Mr[fM=gy-< _~BO2[D~ ^IT!BTKRy6)M<.D \?#D؎7et!$%i1GK50\zrA;k2g^?秳/K!nvvʪ:m*კ'W6'>_rj())1e F~2N٘ED%a"JSgҲ.)kR&',!P|T,*I2jb*Q<8[<l:=+5׋Y§7C+n(RP*skErv9ZܷCc[G9X=V׺qnZg {(e/tiϞ{ {S0?Wk@(2D+I N@&z WP1<ٻ6d*IrvGŀH88I6]`C55THʊw?3ë8$E6-ڡ[2g]]UUuu9gbNUEV'q(zW1 \J4.cx3fys4uN5ITYuEs__XQEy< )Ƹre*oqF@Fn&f}ud0\ޛ~WXߦrxo7_?vvLԬڂtBvӓ'4$'ۥyMl5]լۭIjRG,7aKNj-+I19\i:}ՐILSC\붡|C 5 wԷ;r1dw6>e>:&`6xujDGp۟Ij)oxt`LՁIhM5&|khCnX{0Ծx`\+m*}ofڷ64{CcNxS a5H&kܯhb"M$9;9Cp5] ОL'2aT4xfXwT>K_ f0EI3 [ZcI"kaL̘= Ȓ]:+#˟?R&Ž-[+lo촑h1`4+M9DŽRnRHs(rne Ȣ`;XR.kS 4B"s{,kc:#r;v!G%G`CG 9oWo'DGJ"P B,K)Wu(gDU''y|RJGQ(!m8lD$D8BE4F.;l0!јBm F- C0!J[X1*IDk1hFs+%"3FΖ' IA fjo{Mhhj^ ϷhpC~.3o:9"7|A˳sUD1żv͟6Wj *Y6Δ_2) [}ɸ U'#TTHHHYC9 3SΜsaKNkw(55Yt +7a:ʽ3ø)a1mRz0777Ewzꓭ8lP˽ȝ**~`ys«OW5U`A`p 7kg@@6WDMGR&eo ju٫~mz݆uyшLx<10Ӹ Œ9K˭.#:F oZ>IF~(p`̖D6.:ڮm>kjj:P׎ذ~Jr6 / J\ [=u"&%Y iVEϞm^cR4>ædVDMvjʹu)L`>fس4,[ᡘ01G6m59{ Tr &]'K/s꽝.kNJK\◟_/"|Vܷ, 0G`#-}q^j] 0fQIE^y+o%1= ˦n\22!J9/1%gB:=9 {$oK';T& C*\.TM |a"!u`oCEyq1hh1hu _zUp+`~ū_~}?ŗ\`.^~/a f`l ]$45"{}#ֹ&]SŶ蚒n'|~y!wP| mmtgҷY{#2OrPu%9;)L ?JqkN{µMT}Lj7 vي1aYikzL![l L ԍ&e8śP=#2wdZ8&nבWP,%Ipbh`9u|7:z"tDA ):ja^ɼh$a~ n0L7V>a5/8kW^67}0#0E.~s9BQtч1EM@(K@`SwLq`yڂ1vi}`K`'VIh"~15<`PEse8]VF>?|޾FO,yX܃ <%f8xKQ-0w_zpڃ s~rQiݻsq1("(b^2NM)#/ӂJ+ȩ՟4$UGWWO~ɝhoC#^ `{# $ 0:O)0$crOE rPTLH\|y' aX`|^+OЍՈ $gAk$=.H Ym|(&  :QQ8 $PPaaR"r;rYP?˕L!.*J:B4 !AWL6X5Z_ 6QcFe^xNn4yHV}Ɓ3 t rcH>rAYf#g>t9yK,ޱhlF$5"9iēF\_a0< $O>jbLiM$G\c. 
Ji@!)|N1X(QHLd$&=cu `IJ0I̥Dع^"gF hR |dW(2Ezҋ'㣖F "a,BG<UUtY*bX#³-gy)%Nl\ňOb-CY(?<,PJB-Z$n D sfLJa ֚FzX  i2u(j#$V1ꅑJmc}#q`8-RLp72n5k~}?Ьǁy4`#șMwcb VD^cP`2 4AȞ6!F=hEG^`nZ E`\s BRq$\H uXERJK@f q)RDhL$xX[yTnWDE>Pym+t ښ]m>v$\gń$지WK)#ցjGiFp& l'gq؞+vmj()ȤZt`h1>(!,]p,,"+0IQ:My(1"6EґA, xڶkZ )$} -rF  HB{%"8OGxrI&xu,6 ALhSt8\0xu5Yڠ^f`R&dXxv={7}~nrG7*~^R&@rIk]4ӏuH6EZ3LpkAp)`_׃9AwFγ#Iv#'g'K8OMs%Qr,4E<͖S׋2㿧UtWRIZԷΧE<'ǟf SQ/@]"$-_cb8b g@O?89Ei]Wѷ~o0fH]5>;5L]OOyi)eP}.QV3wGHKhD-E1QP FdJi/Tђќh(A||ʧ1mi)!~D[`f+4+l&j8DV T{e],$rќ$jD%Z=Eq%RG#TJr}*Q)I'RsH\%D:@*QY؝zBJiTw]w\\dRΫ٫ڕv޸I%9a眞#:?}T$ ✳STw_fZcB@ht"Nrrb:Q)T'&kN=9E`7"Pj3r1lF- Ttj۩T׍CqUPE}ήh%,>I!._N0/ y_ßq(>}{RbrCA4׎ǜ#&2QZ!Φ̓uShnuҵ,ڵBйWڇ &qAϯ<4wqA_ӃpDSk$Va՘ ^TKg\dId>pm>W@8/NCobOpx=R6EYs=8_o^-,='?;߻];B~ 3o,C;l$$Ew:bpdD`98.R#Hѱ yק-E,V$+=)@|y.ŵK" ʼ{׭6,:?-j2{52?1&o ?aF㬊:Oyڂ}&z^C(SN21,Rg2DOL}HJ80%_@}ǵBw|Y^ ȧyGͺO:fLs4l\Y-]DUɆ7]lץrie'ۂ"glP9  _As٭KmNoe{{־q摧"z;P!HNGҁ8ܑϨ74#fu:၅6uXkgy}#1'?L Mym)cHCLHpr@3fA2Vi>ŃuqǼ0oNY՛bW?I Fg)-#7ξy F{o#~Z60{Gyy+nqŇ S<`s'<Ncʠ:CgC9PqTsρŬPyuer8~k\XuA7p}RmKOv\@$*8ʝ7\ 8P=x:FDC NVFԚ Sk``x6 Zfԗsnq0:O %^WYUY!-9/|C~5l+x1-1zLY'[h[LgK+^.;^5lul@=FboPopûw1Ņ41^:5*Cz ^ 1[,} *v^ò{h 2-ǡ]U[-CZs`$r9D:x|R#92 c[yf9' u";ga@9[؞s `ZZ'=EO@qFIH2c.Dx便S.1,:GCvI9ףޗ.:CFh+syrYYG};Oq<>C`XX V2烴Xi*pyM [`NcF1Zle(wFm6*GT$;A 0Utȴs7/YS8^`XMQ샭+c }E~XNΎܓ)܎<(;F}ٯ4{sScd XpLEY4j5fV3܂m$X`$8Ι# yq}k\_K1rf] <.h8$0tQ-@("[KmUU| JSn0bz!DmKlRGׂZl^7U|R!T׺(],7`›q5ERA|Shfc;^}S]RW2ܬ+ä\i4y~_+?@'%3X?<>+_ 4 zU qno c8]I0{?ڂB4"ٜ/2-&^ƟGHB,,'a3=Nwj) C#E?be#U&rԜ}_^. Q1L aLb$o4~s9^/}VTfF<2׿]4Ѥ5qj׉۝JXPzsya 8ϽdZblkχʷy4g]LI8WEɿn?һCI$H/<:%Xvz2tWo?_]GS2_OyÿޤEӛ9sx(n=*ךibiAV"a,+xךdv{ڒ,X|6|$UeσdgRW =P % zv;kbu._O `wh"*gx@ъJݽg$ 1qLԸ" Mwd*6^;kmlPwn?k3bkEHcqBr`mF!-mr:>?{ƑB.Q2`Nl3,X!S","%L6%JV$ΰꯞ_g\߀9n;~8RM$ eH>0*$8sӆH"F/@Ϋ:z+-57d(I{L/jد) V=a 5%do Z‹G"D(LϺM(NoƘJi3W1Vx< I0A"o)/Cyա8/#q^K3*u}<O~\@%^\@?˻Ym:_?zߕh8HKU6tq 57:Ebq[JO#Alpï +օy$Fi.`ef $ݫqp\Ⰲ^_\pcK\cIZn8~.Ŏks\ tiIT 8J\6HXŬQ:"eްke\86g@N%TT&H@x8j>1rz*SRJGܵBǴ-v|>٤;lHXkx"BG>2XQO>(tC$i傴U*+2zS,I*][[%(>B7 ȣ(tCRH%b&:ig\& < 9F0ĄPwYYL(Eb:ۥ&ruP@)Y"!(O'c0,&ΖIL鏇7Ig1lfפomE-ܐX “ mfD8Yf74=Yozp5ۜY/ҍ9w\+ZC鐥lB3p1ivj.]Ѽ!ql]7ZYiswVW7&l~<s'a4֙{mSs[7=<jۃ ȅ﷥6ż @ʗ*.ܛP/jd&{8\Pի83<52;yÓ& ?3꿉h]k4tr"p\}o lXfWۆ}wG_Չ+ki5i;aߗA&*(b?6hkVmu`81ɹw8RbYM@ЬPo-cNRNG?ٮҴ"XR٨ЁԚj"L8 IK@:V*.M{b{&?c:7?\̅SAimn/L*( c"q|U1aj0iw7EN2GaF) JJyx#4E\ |q's~CYpJ59N%vzENVS;K׻9 ^ƞ&צגĜ7ZOHt6#gE5g?w5ykfx*?F 1tlÞȋOsekS2%Wӿ&նGaD~i lƵ8@},^W=ܨYRW禯GhR'oའ3hvhY1lF%Ή4u^]tӯ?8o~??|ϟ~}s(gÛq8z\NWSK_@!ۯڔv5l5g%z2+J>~𢡊J[;]?[Ykcݑ7S"z4[3mwm?t+[7uZݕJ };eMF (Vp򕅙ևVI]̖T5HCI~;uN%`٪?'p "<ѓDk.2/+C;y0+J?)}N]NĀ~F q^\KE)Z#S\ժ1EHz95w)$mi>|q8Ѥ|TEnB*IG(&'JtBl7lBu95(Ik=ko']<3t23C%M}P 9lBHz@ JP\UIPyZkk թErcSb}| bg{?Y#x@s…N&NDC'%J '*z:ydqgڟ1>-xj~)gskچ/m7.-:z$t$=0 ͪl{5ZsrT9(E$ZԊ$**VFc`$WNeꓙaI1Z %I4`.bqe.ag -P&4 Q[Ҕ+L1A:U>#5 U" gv #0>*m@.I}reXN5 a"i  2O}JGӡJ'Sh_ڳ+%fE#,ûǐhPlZ?[IhIo)j7HM)x>XiB2VAH&D;tJ<9[l_X҈@ Q%R_D)iL)JJZlaΞ pglr'!(fJm[Dҥ+gv~s1jZmZjOkv`WxD\ F䍥 5ytjUAx80 G[!= mMKc'u!@<:QY[g=|otXbcW [DZ"):mywFhc"xI*!Qk#4Ǐ2R6irD'(S<(`)׈M{(uν gΗ>BULEW.*Yjjea[u3>eOD#"֡QL0y~' #PQTЭ]<]<{Xlvjua{#%LXObY \f?>R#~4/opKVM|B@'p[(ɜbHԑ<+',$R @B\j,Y圍QXjUb.2$lbN3U+P(I!J8h-l bft5u͑҈oXAH-o:<HL!PBk6sp|D_{r<`X ɛh U '\0Z2->iWNluK]f8-1WSpᢆ@4OsmVdX 6@uF{H)w֫c߸qSTK8DR)%\[@(\BrIoKձ#ұalq=o]0oɵmHp# Y*&G/5/.5N5ĀU&'tY5mP\PsDD(ύFx-!jeSMd$I&OJ0¢4[e{$DPG'LIExbKY,3ɀk MΨŨzx|霗~›8ࣅ1%oif3gy0Ϛu(Lq/C~sv{Z竓f8~Ǝ>\'.~6^7{ ̙:؟sø\?s8"픘x?)GT^ԒIQKEFֳz\g_FT'/\wػ6$W 2.)KhN`ɟ'煫IkI ~+-\m:/ BqU},%'-\FMn4;ť ċoY%~ιٻBud-|9%K1^UiH I9)'ߛyilny.30(s~]JU._M1:*\'g(#(QYXI^fLb BƂ!4 m}xRN( r"ȡy𞟊y͢ӛ=;<߿ײRAɰ' ǿGsɼX5~Xgmyib04wK7(T@VKXugl{. 
t1neoϮ]ViAWo,I+q p@\6N(*XGAN@p(E56j>CM4ue)"bICd3W)/ǣl7w.+Q9 x?Y&'𹸥N؛a6ը(Krpp0^p0xq_%pn%olQ"JQF%nIG7ͩw~Ca^|Mqi+(_E܏G uKM%ˆ>u~ӗ.rb \Ibp2RռXxE}{p?+s?+ Fga3a.N/u;gur39CNޯW{g7nŞXՒ+[UͰ +*qW={1^fY螗3b1r\w՜5mu(dNj蕊wHhѯ,>}8'՗ ߹<xo?;Ͽ8y e'?]/qc8X%c[[p:M&Ms k4Yo=uMv.IgRh;~V ůW::$xw]g{&jŝL2\B&_Aq_LtT-z6E D#، WGxɍNjEܫ#-r%z,EvF mζ/PUc?$p "8)JJ دEበ!CFqtu/UVUYfmY9> ^zUp^VZ# 5o5б=N>9꘰ !Ar4LPzJV'4J@fk +tK<w':hע|tڤ[Esz ^kM'6DB-ME Θ b?Ċmⷱ+W }=6{ e_fpzc0s#^lڿ! ݆MtBQzVP%h*oTCX>P{:J5Uq.R\!cHZdږN-┵\j.5of'.\Vs…N&ND# RbAQ9f+Y7HQӓQgam@NH%X:v\{ޛct %nN<1. INyrsls#dWNv⧋6h͸X ź?,'|i߂a~S|d[N4Nci*w4sJF"I dgji*4~q pZ8d)Où<\yt'{P+cVNKh6n4gG%0'! &H[ ˫LGYe't™21T`aJmK1ʪ-oG%IrcTv,Qd,KG Q)Te%8JS(D ckq|bq-{bqj/ĩ.x!kTʀrq iM*QO(6j52v4`TxQ#u 5.EzD ] ҮY) Q* XA5,,smw FK'MꎨnɻގՂHs6O<njHA#>$iM`TYgD!ru)+_c\>T 7K8DR)%\[D(ܵBrIoK]c{N^kf5 H^n j́d"yAP`Eŋ)}]rߊvR ?abD7'8P_(W@%ZZ)\iYIm \hD6ui`<sQl!je2j8g!15Սqyv\\ $V&IleO̹\)Oa#nc<#B =fa]{ɌH^2;i=sY$FLnbcJjc%rqȰt[ %R$QQ4L<#ruNnur?s#YgrOAIaEIL@l .KXk r .H(T1HLrU*.ag(@,Jj[#TɆYgcyr/d $W_O_&3*B|I،3˨\M@4 JxǢ ʁi6)eXC% 9.h-D<.8BFM͉ritRyxA i)glf_r476O6ѼJѼ n+ZZo j7/wZp2?l 3T~$' 29E5)5MQD" HkP) NUkQdZHZ)C>GcX[J؄ 426&qư!r0  w]ef|\b1I o~my7_w;V1b{t% Q/IZ@$6:٣0vCVl`;xv \*/IvD| [DMu4%È0b j76;6Em,jעv`WźIϥ`DLXp[wCrBm2J< 8fcݒz\b9@fHhϣB x"^"q+.$G:*ky+>{;:9~Dll~laDd-"xxJ[hmǝ^ EH@RRBv#DHeIRQq5rGӖ[[E\{MgiJ,P;a9^\{eljla\-.xߌOFYȈuxe(!L߉JBF>H-GtO͎MP7j3<|Xv3ŋj|"_cFQy;z?>RJ'+0(K?-p H]%Q^:l))a,Z'ȶV $pH`Q .%YL ag9͹KҔ5!5RSKH! 5AEƙMQ娀5VBoAvѦަ,H3#j.!r#iӑ˦f }GGћ9Ř}wg| ,pd]abn;O|ë~t[z>f|KRSrN1ِj Q9'+&}$M  1kd4Jc,C.($^Ҟ!*Ř[IۭWZ#~ ^spxsd;Rtަ16ey RK˲GF˗QM.Ă `ƋTi#_hpUxwV6`}ٯ4{TpkUr=h樌" 9oKmH_o|2ʏk(Xu?!Q}Uqzbœx `}J"}8rL\(^+;ӎMfסoT.#;r ehT>ْvֹab { U:~Jnj)Ii5i]Zu8&?s y?޿|gGTunc{3\}9J2c_$d"DB 3 e$=9ez'Hm).{la^Ϻ٩O98!+pz{;TߋPV&ݐ#|2wz)e~(1OכEI}?.h2lp x2|AC}M?L Jo_Y=^ݛ{?*3~2^I8ȿ^O/p,\V/C%LQAk5F~4}xW\h2okM5_^,}_xk?P˰?x坋HB悛f6߿\c^abs}w-KֽКshfCzO|SfcC(}DYӣzL{7e9CO=]ǻ$m_76Mh4 )%U #WAk^`/Tn*lgter.o;ڢ2z{~Ӛ6e{&*WWknQ^r0y٤*Mev+K~& Fc.=%Q'Rn)h!`!2NxH1M$htVE t\‘ PpDžQ! 6DN鵢2tUGjF.xcMvq/K<.v=āb -£U'3?ː>L4Wäzm*'^}1ƴ#68[8xcሇ“$.Gz?9\ y9\ؽe5Zk)9XΎ\-k7=L?qK68-U64q 57:Ebq[9h- # v&́LL+Eu:}e2ddj82J2/)L'2JԚWWH%'SW/P])gk'2NF]!H難LaǮJAI^ '2p:*+詨L"ǮJ*M^2[,m y[V\E풯߰?|q`#B {JOoڛfgsS}FD.QSp'3tP%+Z~2S)d-Έ^]=`}hT0rmVާDBRWuuВRW`OF]!SQWZ&]]e*t%+Ʉ|ʸ*%7ifKY*_Izt)謎Zq|\@>&pLi:$xbD WLYhQ眞>%1(~}E%MԞ}Z^z},a.Dilyοۙ;J0-)YaKˣ09Tǩr FM!2Q<#] 6`+7ʯ_ߔk6*ͫ&swqi u٢ JBYj׹BHZD.j[8 S_ɿإNy{(.o~c2r"}Q \dI::"%cr^":k֔z.do$]T'  &4@iv-ӶFՋϣj 295|tp`C:ކǑOϩ8;lݸ9vܛbၱ@GBTy6Z\/- Q)Te8 S(@q3M&v8hOzՑG׫WEK [ (h=o\BZhܨxBPUhe&PV;-PG:L1.5.'Z"dF0%O-C٦/T2}נh}Vkj-\b*"/M©>T `vtx0xFżu34ł*HYиhi, jA !ݎ!E 9N Gyl5u7V?нЯtǞgJg~::M>v]|3_`Vo aYVhxV-,5*c= VDf ,PK,ı O A#RFH_x{R2xddJ <(TrghojEB!J.(W= *%Ceqq- QXuQ :&"9<)iA*,%Wjfż0zksʎӟC\ܮִ =6vi$Dc,(&J9.ͥYRJ29^K@:5W&Y8r -\9D^s_0w0.ў^{hSD%_R-,ӱEL{uNN}J(N9y0M'pٻeNf4Ga04{ PN٘])ܭ(gv:tjYW%7ZOHt6#eE5 rOX] T^.fȆzl7*f gMr(ōTh*TٻrWvKq N߹ A"5-K˽%YZd}G֡"OU+K#Uc1]>^q[H~POliFg:{7/g+qs2_VVwPČ64 -vG+nY6#Y=aagX )S+XWBO\n6L.Sb󨷓\7꺹z[:7VBT0aUkJgWh1N-?g Fgipuw??{߽wRwo~~ûxnr:z4 >?CVCxCk; UY|qM)[Nh -v֩i/>T|-'1Nw{, f1\DyS*oKb?Ьkkn>TpC)g]d`kDŽ "Z4Q-RA1E,2Onu^7ttⅫtdmIB☷!*:iWl:MFZ;M.vN`U3ث`ˎiYj^9qŭZK+pU4>LJ{[G$GătWj\ t7W)Ǝu@ 3%hUu紋j4;zrG,:X^, *Hz6gwÐ =*|:tGg7ܮXSWgg˿E9]}ݫޏƟFkK}!£N /p3Qiظz--ѥƍ[iRiclh4.îaWc g"agt^*+XL`}ex*ZG0,^YJKuܰH2(!(Z+*J&/}ӣ7]=H6T/bL(H2OE?ߌGo% B\Գ .&]9vͅ}bG1b`gf] YRI-J4E.x&]*̛ɕo:=̨?&0BYUֱ`$Rꡟ66E.gٚѳA)Ա$@ԅج46Fёa>!AϏ~|g.Cf> t Ah\ #Rv&!QH.Bh KPx@SFOQ%2D4ϖZ4>΁؊킚ϯ2~^˩xk4㓽5Q5anLGָj}[Uܙ'oWk?SǓ[^يI9ybZyf`$[J׈O/K$ye`\oсF E))FSh+D%F &hXTo9*!*ȗ2Yi5Ji +֚95c;L6=uuuAuႢ:^Ma /'_&iwt8? 
Owd 40+ ɚX3,Y$[slh{e:FUUںd!Z ¶írWٮxDLS͸c_-jygġL.)t,l%|MH,AvE%2G]OTS}XuXj1P[IgǺHVeekHŐHlTgX6#g>5 zPsǾQ5ֈ׈Fx%d #V'|:CPN#qBdD)b DN1!,fZb@g"0p:Z8NBR1CkFv"󫎬'\Nꬳmc{M_C,Fy \P(P]T46쳰qQ^/B/>lCX*lQJuM)/|9#wJ{qF1v ~|"GV? LT8\*WjzJ}¹1aܥ^}12ɢEd;_?U "'*Np tG{qSAk J1 s6Jo "1gX4`A$Qw-EN;m H].D2r^x9`"o0f4`w5җ-`~Č@!$:KX;E" kؘx/!mϻo!H/)#): !Q}rZ _cM4ʛ`M*^(hRVv{a6PoMV1I_0մQ@<OLFʻXʄŐtiT-WZ.2CʑXLq3 ǻ2|ܮ^NHƮ,"-k?p~@[Dۋ'Z[8XU~ٓ0f[GԯW7$0;)jP%#e.>HoզSziX1[p&T6a^Xlʬc 2] עwPiXr HZըn8U:>}Uo אYwKO%|'aAjV4z㏣Eއ8ʝ@Rg7 PccYx'Qa %6BX@ʈǔ]2vO"cW&pJ!]A5k 3dYthSdVHiKnz=,J{h#edB-Y$P2z]L G,t}=֌#z#.xcīZ5!׃4-oV+\8a0٠B&IVR) Ϩ4B2#5b:\.$)QU>\F_e$JbUjDL=7#g/P*qSD&ˍX_;mYDzs1`mۮOcj)Xz)W~}Wl`bZ켰ZagP?`[s;q=g8#Ab#X3ڒ`ΊdL0IU?`9XBQ TB:։ݱbCuY/[ _MxFa&-(D ^&6Ĭ-X@-4ʛO셷Kݛz>~`?2c%]׸78p [ThB@QYv,v`Qvh)t^pXic_WhJOB YB,M:rbAFA1Fؾ F!YGlڶ3g^yϸvG>+6o[»w.I6἞CTQ{/edm! =@C +<|2><䡎۞RF#%x2hK AB IHzo)Knc8&̏W76}_rs]I8bm`JeN%N KrʘbXQK {+'P uM3&lFTpB$ lo+#rW$~)'7GWqH>]|2>ݭJ]u³!1๥j;) 0%e@B?V9!C/K) ix7ԃsUtJur pR=Nt9u6tS>\ٿ|{Lg!p8M}2sÙt1\No"Ǧ³ LD|_x:BqTI8i4p]}¾Gf5 ;g>G'"fi M=F|}V^km#G_&@pr`#FIwb?nIZtr7Ů&YŪbկQsrCFU *{SPehҜqW yDdW+q>oWM4itĤ4:{M:vƢھ& f17nqz: 󡊪8oW5 }hN%A{ٍ.sT_E ݰY_ pրƘa4ɪ'%>#=:Ez&Ni獱Mc3n8D[mq:`WfЛNNLSz-3ILmgJvaNؚ Fkum~[6IL*dR힐uw^;W DDcsvϞA[N(S#â#$"r% )),Ҋ}r^B=HZ8S{͡(H¼c9&C(>SK>jO,XpkEu^F~]m%7ŽzyT Łr Kz˪G Hܟ:<RSLN <u 9Rja5Ւ挅[X1Q]ir<H |$EFl^fl%Q&N?qy8De5 , sR,W#Q&.ӏ_i:ZYKS D LM`K`QE57V"FMsFޱXǛ%#_gNÀr:2΄y!:Y]YtNo4MKw>K?[0c9-r1Krᧅ2ݙ1G3GyQGY>rP mJ nTwdJز NI*&Nkz)PM EέaYqGREbm%RHDc^٭Js:#r_iWtgԾ6vߣK7Ny +NWRo `hna{FD§c{RZ+y䩈G Ar*<"ǹm^pc4cČQwG̓LHsJ凖{o/ڦ>V zc?߷"3! &T"H)RƐ{;U+BDʀ21 {)1aSxN5歴Sz8!e`U.v7距}@ eЦP[60{* 4W2:D<y, rH8+ 'U>|}a=c0A0ɵ r B($<Mh՞<#Dz4‚SG]+ onޓa:)xַERLREnSHc``*մd,8NBP<6ьȄy-0iy' MV]]FT,G<\2Vi*S; vAAE< #"[+^r~Ukw/Y=B#ߟǟs*%0NE̥ԡ^ s`*BrR;[- ԬΉ?Ze&l{pv}d%]mgvI]mD%vI]mD%vI]0aDJL CI1n8.pDlngW&Wf&s{paǷG?9 >5yZUFXm;vE7hҝuVJZY;QLRvW&@w1xRlbm5ZUc]y޸u -pZ~X]k~8{V:eZAA8][/ro^Mw\xps} A*zUNU-6ۜ#*|\`{n FJĵa[/QN)93Z#lR iGT{=Xƀ1T)b+t@d>9HysQ STUCha=ȊvUEtFviZXr?1#pIe%@VWdȤ uˤq.sdT̛hNc3Z`(Ket`Ejp'95rvsF= q:( G_~~MQ:β| ˒j}s$|WBJPrCA4׎ǜ#&2QZ!:;;;+8S{Oe>18Ü#iƂcX,Z(Q1B-؆JUCSŦtNJl5{v;([URnA 5bHM5L5yGŒDv0FXb=yE"RN"D" N0ܟ`p:o e'%Bk0xD7cQ13OHx?.z{N@lMSuG4:uPs#h;&r\=޳4z7.Fu [<}8zf| rt`)aPR;O;g8~(z:$' q3`OblHG'R3 Fi`H;z=,~Z+ѣZ/Xrm8 lh4aތ./}d|`@9ɭ->X})Jb_a<="DdsB `r3NAO߇2_l&a)ƫ/6)N"04_e AM#sP9&rԜPÄ(ޔŶ0J4'%EpBKy(Qʇh`Eef.fUM1i^J 50W05u;L׻Y]^폞`i~I7E?ׯ|&tf. ߋPItA_/tگo_iz5ouy?ߧEO9? zpQ]*iyj [^$wiYV)@ʽК)4!ItG |SfcK(>"atu0 IV=)IzZ﩯։.b#?4qO.>omw5P%&jAҔU 7+Rker.n;;zVKu7tDP$UY5S텯^۬3hcުǴyNeR%on𪻸i׽`'"htC=6?P{ trFp>!q`;.QH1L(`tVE t\‘ xNoͤ@Ac̉51(BXtZʵQ{Jg*^+B/@5zk4nkm,.Գϣ.cZ ^V=C)J ʐ攒jH/fHcxG&DLZΙІxsҧ&@;"wvuuGKǣ9#sj8LA7eRjt!(*2 +n<QHۄ A`DQFe)A@@'Mzqyֲ9{&6'f v"Qs9l05oWla#6Hvw]eGs˽b kqfTؑ`G0 0߰CI[* <|aÅ>_y}5X)TD2'T%?fi%1#Ayn,+? u9f}#)V!"A )HÌl< fI;cn?yv!9QI~ uWpR zpBA4 2޵c;2!NP=Bt)$!KBRZ=NQ%!VD&ʣNPnPl^?dC;S~υ $E'Ù}l!\='rT`b%O򂥘(dd\x~0[ww Sk%y MQĮs2y&g+0{ a]\Kح0uf2 mb21"u^ݫ+B*CP& -e 03r- H}(",3 E*Wy74>In)DI5J0꿪 YtyryrR`H!nF޵q$/Ik;gc7 JM 7)[dQr$Θ 4CvE5 LXl8}ZqKH_6eB]jӽu?Uo^/Ϧ'/Vkh#/a?T=EϮlD0I*^ki zb]MIk:]W [[dm5S0(AE\G'uOg1rsFu][ V/kCZHV,a|?,z6=JioTM|bP7`NRV{vyՋ_ϯ?u2_?~Vo)`*uOP[~=uU -FܡjlRkR|ҠZ z?>?ƗI@/yhyNk&X}T_ 槥'>a^D4 ƒB h9S)9ȳ1| @R<#>8gDg O$7 L)@YRj;CwW&:ع05{>P]"$L:)NuVPYH@;ة`QW͉<@vdW6N}k}4}ѫ:0).256)PB lkmP`4x_Bp (1dpu$m?Ywhԥeދ[A -üf)T]TqTa cg8*U?0aWv4UAAyvvãh|`c?Lf\N#o^n %phv(E rQ"SYY\\*Y*G2udR%FNx~Tu:elg%eSP]+17(.`DbٹlY.HVp1Qn,WM %\ƸuH<,.Y1wfZRGq9aՊ;@}#,;kS-k{|Lqǻh[Kou;:0R ]v`N*8>9;G?SE֒t) ǢPeńmFu6ݦ; 4%P/5wD\LFLX*[/3$s3PSKh0jңjY !qۆzj1!35ϴ҄Ev_36*\7aO <{l/ Kd`Z_\V>:s7*?mONgxpzܠ{\O ~\5 ӂd* !+. NOnnurs#,kqПV҂hD#qPPY'(u8. 
U) )zAGm[*D,Δfw;+Ҟq?FnRfH{NPD05{o WI 0Z!i\x|^z -ÐTdPfwI 4$AH4@,5/ךPUݺ.ø j)Ͼ?tayS]6O6M-Zx8֜]^/;i Z[-%i'&ftX\zU2ɲ x (ԉFC`Z(g(^ Q+3*3{bE)Ioa&o:>*(e 68wpMW kr!'5sAp6f&7}mIn0}#3v$/h`ـ&$( Z$% JzPQ2EWgsR{an@cEF]J%Q3uێ0C$hSQG]pngl?)]ؔi=/cN]wyDV <('eKיG%fT,vD c~I +:kb"ALP8&>GYFNڹ68aiԣฏLJ0bmcSFd53"cwH`E;|[#iR!9<zw;AQ`$Rf)Z<t=4O):&4w|PmpngO' /Np5jY)/ywM3>[TĀF'h\h* IDYaEECbSٱ)PoƇ@a8$k"O\#o l1x]+7Np㎼1KjQx0Lkƒ!\C Z`R݅`Fkbk:]!\cBWtBKte)cAWT &\w|*G+dIV-0`#ɏ_~~7MFMP;"~=bCv2+޸JXh+FF-#:(itiyǡG잮:AZc~hَ~(l]{ЕjӡF*+ZDWXsB4M+Dm#dȘM%&n )@5lsGO0.p4d`8{|s,lЍǎX_xxXZ# LhMGH/DBC]}تaTڝϳyG)PB lѮW_@ﱺ9}RN(4Uydam3Ozgg$\]s"ZH?G޷POȡ''^5գ/B'$ {QDڼB5m;x% .9?;pgٯ"~de]z>p5;|?|G3Cٴ}Bw+C/^{S֪.o}o:"Q_QgQ`]垹i8YVtPߏ'~,0YEJJY-x!s=U%-gl!ηq!ĩV>'Wȕ Ɏ>Z/yb:D(m %)F&@"o R1 H!,37g&0E.}?|> !&FN%_aS)sM>a ^oq\xǧvnBI7VdZif4LLoWΉܽh^|K?< NW|Y&9g)WIy!dGZH8 u{ L9$Ӆ*.ɽL՜=NYe1 S)!sch~Y? S$pV$z >_][]SӮ QO&|~UC^m7,k-@R~|U._ΩuhwnlMMc$gT8ioZx5U \ VŅN1׻} I`ǸGZKzm$QG rG)%]'` kܱ=\)'ʆ,EsғG:]:0M^Nw>pQNI<5/o;ޙM=vd |V%Ap I!Ju`h^H*25{ ;,|K1 Bmj Q?^mnng۰mH9=zP?t_POFʿNhee> R\O&3 M+aI*cNeɨY$dʴ;ғPFİ\Fr!4lTF|=;T:Q^:-KriHNV,I3Ol%[ia~>Mq7Tm:"Ty/(CH $UVxV6)z錾?K}A:Н1&l<{k4rndB~ڔLs dDY=măZ#1<ː -P3#l5:!T +q}HfX?M6l,UkR7( ;Ws^TNgmZѻO|?Wо,U@n|wk1}A|InL>V=E_ -?&s=^ovGe  j5jtZbFk)CgT2xj39}R6ԡNm`)i1yk+ *hP`U2=Kͥ?(!mZ55a1L*]2 ͒%eS`rh8 ע41i5hYzv&:c-W$Cj:tfS3!:C 5=Yn KI3GH+=:ZYV6WV_[A1Q**:Tf@9ȘI{;Iw]xg8Yv>peii"Jeu3pp53QunV_y4#s|=Q39kRhsĴv&C0H(k9Y {dFA;b1WyR CKUP*U̖ U %v.ULT!r},IAQ# f5DBrS-l4xJ*,Wz؁eLC d\ s!#l!H@Z05w}ٲ6mͶI,ÁT@쏚5"'\qk.IU6JV@Z }~V~z2 ) &Jg ^FO0DD<&PyYs+ eYV"%DM9,+JR@CmxZhd-GEZ#g=$ ]-h/$;GU9멗ǧblOLl}ּ> &ƭC̳>l<6VMمQOmɖ=X%}, } Z=:%5)>L^~B#9l1 2,$QXL!WzBYQH!NOI,,C08@"3Dg2ɭ IT&2l w#x{wy9vq 1:2LǤNB*+H1dTݽoYI2u7L^D#$KZPa6-r j:[~ݾίw(AG1-b폇+.H|="ēɽ5-|0Ɩ;/s0\R\rɌVP&qLt0#AԲѪfBU, Փbva0 ;B3xPs-= |&%նfl RZ*8ぺPuGՅ3.'Ym(Ne/z^<ƎQDeK IU(;ԲAeuًݥK0MCq< &Cء&j&oǬ+0%c,RG[a9sWvkPړZ{`WmQjeܐAzW} p}v5*=q{%摬Dr /:dH`bQ$lS|Ere}9aԗalwwE#Ոe(qbiCH/> r\ƒNYR,gI20nd1MN"I.k%虀!Z&=LN (Bl{Pk׈W>u 8&µSζzQ^/]:ff i0ABFw |L61* EЋ[㎇C۲>|`!GPa%R]鬶Հe?pcFQg;n~|GŘYcyq&@:/~Lq0e!vrN //_4G^KOgv[0 yd\ v:[Q#sôvT1EMf(+tBk99|З3^5/$҅](0M"uU"[&GW?ffgʹݻ[A mw3ͥ54M釚4z_ FQ?-3'æg]/Ouf:čpFpp-'P/~/[L`C^.ZNq⡲Lۊ|gY)|9Er 9d(uFiQR##D2*T-_.E/9{wۍrY&e+#2d)}(|f$R">5r{S)QM}6<<pٯrnՊg=8SO:P2tAGP,$91l)蜼aV@|^x[މ%.hIVde?漓2xz (vh|RC!Lk 8(ռ"GOBB0I"?j8Jn8gbQ7;W$Y!8 A802ALnC棼;18SyMY,w ;=s VB*)x&>F0fed[s^^CϦ-pR Rp *pa xWf&|?2C 8myH%UDxi|6C5*'u^qMԔ%Di#?m9P>iOk]6saF֞ч>ͮ{m7;jс˔WJI1 bHLCNCԨ2OTSf.kRWxKerơU0Q\,M.Rg֌I8J-58gF=h4KMd.v~uU¶lST44NC8Bߗ'_9F_Flʅ7K1^X/S!II;:;򉖫=;8n(_/N`7:9 0Q%y%O-dKr64JSg7n.sRnϏAmj%"u8H96 p]ꈧMfסS>cp|4}&0h4wJg%~]ukC/tqD *mECi؟MO)7KLQ>1\]v M6; ;~Z:DP}!%=:kx('x9T5I=o[^47촻bgmht;6ݍԻu/WdQfrEέ^V,oFG5Z ^\|vڐ`s\(:EM˔h0Ǥhqí\ Rpw`BrI.\\NO #S̮ޤv&03L\˝Zó@;я&PFY_xVYu˫g?'|ʾ#}8oG攜O#l GM?Mqp}ǡhq(RՉUv,sCD>@ga۽TF]U*+~׾DA+IBiַV ^kɋny>W#ͬCoke, Y^i na%h [U0cx!cIC luj ~kx*$g5v<3k2*GY:,dc r+*eAFW!+Bs? u@|wQbX~.y+eg߮>v[z6BO^s[tu'񇥫an(WρttuS4 ʀhыsR]>p{=&#:@k/W+@J:;?~떛0( KJ!e(5,D56nb:MVV)!ܴe>BJ2|l7j1z%74M-%\?DY ZjK'邸{UkzEq>R9jذ(gGHEY_薆yxpԼ kJc0[)vʑ9=g#0a9&E  i`*gE긢^ɤ P)p)Cqoouk|Zni>rE? 
\.&&9&΂,hÕpI DrAeD A7i8AM$X)v!9@"L2|,sIP}5MM%HI~Rm k(aF6Ѷi#ZZí⮂G?ߚd)%)}p+$hX!SjR2jae"!Z(ZBCvAJdaL)Ҍ:f^YMA+c\.pVI!%I ۬'8x B)D"G@iDZbŁ&l<כL6)cKWT#Y[R%sz9 C> Q)?o6ByVyItI(CL:aԂD\J<ЀM$䒥hy]Nr%HjM|- {R`8.3\1p@'Є֢i)MD9KFHԌ.4%zq˨Bdjʔ `I`q>QEys#H`>$"`7&H\) 1vZ,A;՞&KUyP!R4Whu"DAgܚ8+W&b;-낈}A_L?iCui.VFKk9>9pVԀhb6BZtWWUuˌEF Rd|r b][;Ns㈖mDm"Bڕ,i>06#UC++rqCf2a=:RQ9q$EG }GԐ#+ALqͅRpEW}P% AT>#w124jp\v5xBF!@7Ǡ1XT5f}N;!J1Vq}F ԙJ/>ŌTU%1]dq8QťTIkB`BBK4O #;2mz÷%<yJLJZ1+=?˾ 5r1&PwKjĺ HƛꐅK9;Sw8<+9D\H UeBX89(y`1&b)rE ;4xA"Ӫk zʇ`]Ƶ.h^|OC !e3 ez6"_<ނw q _zJ,0k&t!ǨoTC}^i ~G9mC5YKqZqةbMBbݍ Q'KSPexy퀼iD4DxR!': 9(PbR$ R# `iD])?LEQl ce$%a@Ehg[8w utP,TBvuD4` X[vD0fGyhМ%C'eGm8.A $3upZٗ4 e@HJEVGE<UYӰՠP>fnqcrj]!dk!P gHg}xsAT 42+'z$6@Սx魪*"XamH ICE|m6ZͣxajCDq`nzwvL{sn\ W !QW<"u,rAR@P D)ALfxjjxC1:ĤXƐ59vP!4oxkPiF< (ys暷S׷? zI)c˵dW֥U,ZB3X+TWsnYmKTZ%Z#bX9X}~4%y\[3 f/6,m~al( {_ټoy֊yƹj6/\871H:?&LG`Q,T/f_zPy9Yeb:^uKqYe+Vk?Kp*=ӹV:tr\M>1ɥjp"\MSyU$ܦ7)Xy{yzվ77uͻ_1ˎT_#/nK^f?}^ow7曇5rޟanXm#M/W˞~B3G_6Rvn,mɿ}уY zl;ٶ8o) O<_#{|j 7s]?ln~~C׾V/3vц46oݐ6 u XAu^K~ٖ=FN5.9oGYU)ۖ@-a/jdF?cxlecMqJL9lL-\/i猢BC6`ε }:sT/JlU7%^n6جR;)WXb[wD-W_aE`.\{嗎+VySp\9Lp+K jKt5tоbOA3\\z f鸂Jj|FW,8npA^pjOe隦riW+$+g7 vbQ׈h|ZqDSe˵Y{\~Tt̅Q;x=0@phR77Xm\L^(PԹ&])lOj\s)jip5Q%-kʎquizm]< D\ڨ+VWkĕu+S(פ^pj.WG qe?|\Ap Xϸ6U&qWb=`\bvb2wJ\@#\AmF\\CՆ]ʤW+ĕ'WB?sW,7u+*9Q%j )4%u⓴' jJej^Gg;'eզ\ZGKNpF\%Ro\cΓg ul7N&8u.LCnҪLݩirc7 P]:XIE#\S_x0QnWP[:Xji{ \vj_:XΦ\|=~AȍJ+Vk?w*]\W!)$ZS \T;%t\J/qա MGoY/>b V+aq]j1;%'LLp\fiV<`wLg69W)fG}#ħɵ'>wZ:\Zl&>RdMGb\S7kt\JW+JbG`B7b{=!TF+Z!UW^+|  }JbV+RI/IQ t+ɮX]&~?v2ƧAp YnfjK4J(P>21/Nͩk)r 6":IV [apeWښxhS7b.+VibKEp*2EGn2ҁu1L{}KeZ]x?}-vfm&:(餕eR#,HyzLq(ƃ@Mu?޾}hm9@!Gm㘓6JBƠ`rX5 ?M}Q<&FhWMBK41tv31j[V&V81`m'\`o \/@S59K'\ϲ =$zeVijwATWݏe3JS#~mkw^>X~\x\_}w2޴7sjo>_?=U]_Y%B8tWODow?n޶|$7bmvу}s?qv^ހuU`=Ӟ3K(:߻8GL o8Sώˆj~g\m>pimFE^Z]o riz &?c]dIG_~]}:%[k[墎"g3pyrdNšg6Mb8߅)=m߰Ƴ 92LY6U[djuY]Ūl TKؗ`e<Jei9tpjIAGW(0Q ,ƳDi}mwy=))!• ':r,c,}pr-)\=@Ғ;}Ӿp6ǓQ{4Y(,)n0UC|6mΚ\sUsӞ߷Bi}D0kvzP@9"B\eq8j8tB)9!\i/>:,GY-d)#<2>{[x5Qk{+?OEVB~RDM1V/R!49)[g6wq؂&vws=HtWNy\dEyw/~ySK̶zotfՏEx2Egf<͛2Ybo6.L:q乊U$.eAB+ 4P!Z^4Gؿ˃^ .fh*&S5Ѷ_8?u~E?_\[F=ܻ|{زv-HMw~GϝYc+q}+ƿ/kWY{aƣ+Пor[?*R#w02ױ8 aҝ*VFeP\B;Ej B&OI  #RH4x$q10#XhBvΗljq>XC>љBtYlƈ PΜ6;v(C C#]PR0$UaJK5>T(޳Kj])^{ 1=40Iby'Oī׎}_zH_z2ފV2We򲂝/x۹L$_Ԋ#_m1HN3\2kuQm>=nx,+- N&NbsF"$X)Z]Bsh8G> -6;_u0B|pv4{ϛkٿZh9VƁDg#12{VD^\0 7My(QoK߆Tfi}-d:J8gXnB `Qrv8k}ݍq76'8WR"ϨTTo+w4Q]d VxLXsntzyV,3] ~d zf{cM=Ui\k{b}O盺1Usm7˪0^(Ư/Oz6쨁vc1g.*a|nMRtY*-ByvF@q3).\/\1>ƕ>*X'˵%zkswqo= d98"JLTز w6ݲOw74uI};6ToQ%^c3%h6A.q{yVg{Cdݙgx:n)]y#^>t QƄO;x>bDʨ0hGO׏Q^ mN DŪ:FKTzTD`k ,89'W},UO("_T4Jܚ8 .(vedxޏpBTu9N"izrq_nTsv.U{tg7sDE)a5#LdH::EJ4(QG-Jã\)5) H3W=P{6oO7WenxrǙcȥMbG%XU*U>o -ާ5 ˣR$ +Nˆ N2ăn[r{)`J e"" .x!kn S t@HkFb<&"d joD:fk$Z%k1$(댨.]VRS8Q%38*H68X`z$rDT"}#x]'; j e~߶=ekOװy`iV/ ~荪l\q|68WG?") R%$)wD[JDSPsD+όFz-!j"D+@&x$^`5Z",oɘ(M`5 rI{Ir.rO9S%aMXAo$;+GE^,SdɠZ/RcVljb9?|ӭ†n{ZRG[: DF_$5'0ӻ>4R?댖! 
J ,JB0IH3iˁ Ms9$o1^+T1HT;bpl4JR@YìN*Geͬ6qvg^ DC~} {tljI@E9*_0:0gv a2*b~0>*m8]PzKu9Q-A$P$o-#wh$2mKugòBi)grapq(]֦'7r>oo<(ڰvzm|NtT&綴hn* / S@Y~YrqET*Bh Jhi"*bG8 Ps 8Q)qb &6qvHԆi PVp.Q$3zy6m_\]t_]Pn۹j;Fl祈h_qd`I#c Qrn.&iY"DA%z:4z8 ٳe9.0N *oGɫ09|L+]RG]lGl%PP6)jC=m'xDz&Fޚ̚og* `ޢ;#N$!hby/8zўEX"FDu!@<:QY[3&v<,>8^P6)"ҚWFtf>\CS€PD$%2RomN"(8{WFJ=,:8 vh`}1\mYrAe;U*TJ13 ~A*΄g߄}dir uP3qd~Ձ[+נ:[͒}mlhGmtdc="K1Ot %a:@1,,7.hab͎}ol~&쒇y~\# <0:(َ;$ُϔZ˃x)/=]̿| s&ya}ΨVY)^Ѥ1 r^$%h}htVh * UD:,LSi mL \."ɤDV(19q`tԪ ]rZ՞[ח4aבm|6}i6'|H?.K㼢w{jk~1O͎r"!; Py/dm%k)ؒɆ)+oryrT&ʻ޾eLeO!ubo4ǿ3/<#?q8ދUJGزdcnIlĬS'n\M( v=m/cJe v`vE߯~ܗy̾L?Ȗ9~g/y~buw%1OEJ{,(Q&+0ITJ>h<4((/5uzzy=.;}=x3|@Ufż\9zM̲į>veDCqfnfOͦ-CK)P .]eD#R=Ru U^炪[tr d6q(|"<CMqІZhb\߹bGbɇ(H!BdR(u'f| ޛ^RbR R.V0N9XTl>mpC U4BY޽ڍݫ DJvR{T%E52BySg-ۜ]NMo\x}MpVzKrjnn3txO ynyJt~v?K6bgJHHgt:G|ʵTƷϤS^?'T-*t|EtZR*wa7E{s"]~d1gRF!y$Y;P)dO:ExTp=0ȶvl[W1~Lt(%؎3yZZ97%kxrK.Q-Z[TyՁ MAB1Zat%qc ء$nQt!$%i1G9vR'Um:<W~~vm/oann71;qIw %mJmM<|2OCMAM98)`jl"Au8gDi 1BQJHڝD%0+>*sJh5R(7a JJ_幣կq~biߏ+FdLҀ?@~]O;>ѢuN4N\:PBwM<qwm78GbzGPpsQMJ$ G*H%"h 2BF rzǨ2C4${O~Ln ͸z,n/p41.HQ *3̎ B:)LNSL29U Lˣa H:UB-El&:!bB<6# D:A(TFioiHaHw^Wz`O>!{vZ/ pN|D5x! O%Ỷb)P^TC{I:\POMf I1) ;6g6?F۳7u g\K@ČF%l%J+Q1U '@6-ƞ{}O;RG뚫?kƉtSw2 x^N?uůtGDi@%sхskz=dMjbcUqp%Ŵ 5>y8IO_fU9dts7r^{*i=\z%=ƚ'$i|{2=Y@_3 RȖIhE͎ӴZY&]R3|KJk.Ċwr!Q'4ϯN #^^OgW/eX/_߯* k؏&_~$x,5aッ?_oDx1 ?-GW޸cW}h{և;hك &)jլz6j~e0я:L/xi^=鬩.>beR>] ҵ yG:W7iI;ѡOٝNSǏJ,ݍ\q~}?wvYdԮZv܍J{> h(ӥKʧ@\ g&264C@!M}rJs$z1{ra\.s}ijtCqyMb͏3EzCVK=L,NgR?~85u:LjT(Yw_lWqcu6=](4>^U-Kރ/U|8^/yQ!,Ym5Hp͙ӟ{`|QiGcECk)YwhϭqxRR;C"k̫@Co ,#ݚ4.jgh FC, S)h:=V/j_ F-t2!&%vUmQ7,8=}bstŷ["bZ& "i}0鰲CWYaPpO. }5bדּ^JtLB!G 9"}z#) BARGR, IbaBVbZ)biW^V P- ]#)6J Bz+NM5S x| ksE.ȍârgjMaɘ~2& CDV" CKQlҺ@@ZbNhoe5,si3n6B;7-VŸ4^Fwp]Q^y^kY}<q u5b>bjȈ7gۂKUrڃ~NR>Ja*كٖz,} $%YK x A2ӌ KPHJ€ʐ䲌%@-LFAwsC̻P̦_~;ϓ״~05^:<T_Q UhH8F ,1x 9&'$`0(NKnϨGq2״|lׇ]%{H{[SV@hזi[CrRZs& Pnb_M;YYdQ@mS2JH ;$(*lBLT ƌבNMlS` 1j^+s od7SgYKQ^0@YIrb.xZKA*M*yS"-z{ ۷i"&32xilE=n|;7drfߐḐ㴲lBP[ ЃrR$Nr ^yP:&йfդ|n]9hq\NeNz]euU:쥍 zW͔(,ETN-QRtȓ!& eKJ"Z#sj]"хbA\mOǩ:/6@86Zާj<-@IqeBkBLM%WUujFѵݛ]~m98~t&*/vry^)o^#)Đ]NFԦ{{Ml'_Af9__8O]|^%\iv7X@POs%4 @?&I7EJ-m/BGNV?6%?@r8r& 0_ :5iQ(X"=-vY-5-pFs]. N8H=BLM;'<7&'~ 1jf ұGGT0vO dzÌ ෋'ZɅcIΩ}xttR'A[*S3gE [TuEi[5-j"X&\*I !LČ椱 Lp 2$x]rEKl%n^n;^_Td ]ߧkw8^Qp7 1Q\#q)sؐ[Q/d30F:ILƘCsKJ8 :̞ l R7Ft$T9{Fg=9MUh}א}U6ŲcB]1XdYQفPȕtJ(ԪPv ;%X4}R6t7L*ZV0pTPύj^!`+͢tHKc i#trJ4d7d: L!ډ4= @t$❟|׋xܮ#%qml>ౕkgœeSt!gI},:#@)R9N EExAxqG4CCu+PԷe7H߂ȞRJp;[Љz> ⩫qAj*^N,Gnjuk!FXiF6]xVtRC 9ƻ2Fpe%k>{qLhJ'h. 
-iYvUV[R%nUwbvؔPw uzrTg4V!G20)M6c_@U-rXZ*e*EsB>O \tVB~NI&f; 70\B'IWI϶٩iO9e=L}vYJx2NG4tQ\Qj+++:yhu짷Vot2-\݊/6>2wNş& PMo}].Ca6(t+ّʿ]MK"_P @nf5?#۟Ijԧbg_11sN{&56UfkN&kyqtW&Tځ{&K~4Ymv콎:f4&Lx;id;onQձ;A+}`s[N ч` yF٫ 8Ƚ/Ri3l#F ʰTu9YOh{j✞VdO/ ӟh2Z5K%# \ rlcE9$ (Thb(EàzQzCP7n;jjwvʹor~>4 o㜸K9qG\5&٨Cij_}p^}UA[=EjF)՞YŒ,;>J@ޤh"E8(<\e Z$vh yxFeѩGKЎ\bdZ"P'Da ])*dx >x&y|GMțk5`Bmv0aȄn´%:Q/kՔܪXAd]{Tm=XH0eő:BC;ҷWTV*+7xˮ̠e<*)\&=[P13iiSzTW/;>He2,J04q Yz*阄BC '|sDg/{ozXkӃd\땝 BɈLiOOg!uUOnOBh#.%n_No}wIj|@<'cp/$f&SD@A =l`A{c1ƴ6s" hmPDY@z#|z~6 %, 934YE<)kFv8YpALWF"QkyH"AX!,dC̬|BsAx)oEsd\PLX-).,y’/] Dc2yMDUL'>_]sQq=E2p!k㹳]mm&v$s}:&{yq7K;X`W-֮Fv^ɿu!AA2l663Y$^*F*+94.ǗYQf}Is gH|f"e(:#Ȭ xUh * @nWUY_CKz8"\1{txѶ5qvT>7BO a`2cN]֮_\m{S쭭\yC[LԞ[u6얪Sx&C585!76 |W..Ġ bB[ [L]膭c\E6j^*y>=pcᵳmyKEWTS.#9R7\lͧ^R5a]H^fn7?77jpiܫ XYu?@1A**Whs@9)u$}O$t$Fl_$IYr]R(%`1f8N6)g4(7KeU2 *'t.V*TY)R2L[g>b~opo0LἃSWmtx@x~hEI -y`D4U+nT^Z@;QpBV}n쪃#?¨x<} Hgk,m'g㓰K!jpVMeOtV|#$%[`VzԔM5#V6}~r28AcBsJ%EMϗ:8YXגk[usKkq6 :%߉gJZMT B`>Kx(=aUvLZXHO8ƒ'%hnxA$]d, LqM^bJڜ8P=w}띧hbtlc䔐0aj<ޗ|?5(x{[ Q,o }.6ʻify³gSR_78~{Yr食ptu?ٻ |61{+{'vf s?)\ݥ Z2͈J֭| >Węً1ҳ6v1AmC#M]'wBzv m/;=Ny>,)lrf[EG_GwoZɯgً=]ԏ;`<|_P׿ԛ:]N/{z^Q g“~Q6=ګW;zu%([H6 TJdtq[0t( $3 `@T=bmp¹gB[[`NZ a9DGC=GK#'sQ3e ѕDD[GH Ā&RoqYzgÂOcˇ4M\_h&G'i}{{w` *ߤ󼗒S>Ocv%j9N_ԖgKJrGidT\) @st@Z[0UdPUH.:,u,(.lkݳ !L*]2)J%K&@,ᠽ%b,C-zT7jq,i%:@h1sfn8A$t>x<(HñCGБ:1TKy~ѷOpIke&pPV^ 4NҿR;G8:8,JM(#%D2(TZ$,nlU ZG@:&ey9q)d(F>ߚ8퉣 /]~^o*}V#~԰a9_ 2i7nVt*~@E%o'J9Fx]g ,jwbme6)]"4Moji\dg<YA+,|{7"ó4c65~TM*<@\u? B^%:*Pޤ.mQ_;z]z 4l Rhm":wb53a,ǸV9kd-=oθ'p߾^ռLZb':&$i\ VpZ VGLf3s-+ [gGGؑP]_);= .>K3{0_Սy,n%>_s,!r<9 ,1xEC&l0.(>vV4s7-+ڧ %SgTk*gnVH&{nf|rεT=VYTy ,f Kc{bf_WPTR]#0]s7\mc(K WÚ=N}jŶ1KMN2f'PÓV:0} dѻ61oIt>2 1;g3h; s ù-nܠuZǹUI$XȺ:zB#Hd71䍓/RC[\4ŌfYƣ,H6p3xe1sə6Ō7]]Xvw._Zf x@%|BMe&)"8A BG[] gm`A{9cZ{9R%6ZT0g[^ sep:"m̙GbʚOd. .h$|(:dDwCI-},u #MqGUENKD\IeUtMƨTymZE V)kA 9O$3V3b@9|xI)+^l>F!] ke|ENT,oOlȃ (vYnAY^*'o5S2rZ&p4 믅V,iN  %mVʗh[$PA8;KNoGmm[&v$s}:&Qq7K;N>L0 FoL]·C>>@TIˬr-sྌAW>jF:JE/k}aOfB߆]m-Fߡdi2Jη L\ER璼\ޫ:% U'`&%LI,,rRwHJYd.s=:Nymh[ܚ8;*c'x0]!h{Xk/ýZ<g;T8ϐts}$WEحJ΂cvK){ËI]_q}Hg; Q0|LJ6W]jh'$\0zN+*]7WO9ot^f3t?ͣYWH%+*Y5uj7 w5/ oCK%7χpͷ=>2燃%"[*n)_^oؚOYgEw@[Ċ_yHz0"[n?:n~ nz`Yo'ffRdf qefU̪̬w%2F'p^S2trsUbw`ANǮN}][SI+ %P~0L83gv$l3OVZPZ &u*uZ4mXv]8ڭm>b~F[OKfZWnk7ώ&C&m-; UYi# ^Z:%ꌺ0b2ꪝ0 $Q7Go"1ŹDg?r(K ~eqݽ,Բ\O3"nvEuVmV%>0C|9{*prրOڈːj )!*'II`SexCEBe Km= o5ɡgHJ1&*yb[ȹgٮ"ZLwY4', '=VrǏmTd `ƋTi#QKLՃmoK` zs nJZb.x2! rP7 JS7oCo.7]kިcwwWKvwkRn_u)s=1O7*T5, SGKA E!TQiPL$[y-̴ N0<`< @TeA4V0f!>Txd4y-5[(ۭ4t;$;yJ?MOF'q p4S#$f|mY$41XȽ.d^_EQE"#Kbe'x<4tQtIP&2M9٘U''q3Hc#%P5pb#ȏi$.A^'7G;tKp?깞.Ea1DL2gC}p);m GN긾5L;%^ <*h8d0tQEmkv}w?uVՉl /!3~ޘxb,Q:*O$!"y&PЉD sD)`:1p1NhÝUB9|r2BM4*{KP˜cR6e ^fayhbh}U-q:'< ?FOsPA޻Q5ew2IJ;)%N..PY%;;AU+B> 3 Qpr1NOU8ؠtYN5WO?>7/g?FhHq{ha<e#tOr9w\D8=.&;*CrM"%{eBKezbzvzg?GM45:+Mt6+4̶ }wLjƋgK;,[yʧNeA{*IU{9؝2OG~48z%Q.eMz],<ܞEy"WS2-TmOc^Kƽ&ך75s"_ T<e[<ߥ㆕slTU0}WP\ [/GIgܢH'g- PLЬI%ѫ35 JVH8 1ycTd0{b:^;],(ϙ6N ӐuOCf#WoexV qLR-G5rUӔBt0X#NxH$QGݝjr8=-/NNK߀D x'T0gS4IyB2{{"B;. \!wBWO'Ey+[[kUֵy w^V}7"h.iB]嵺Lۛap/y؍Im/as\$=[icc>Ql j8e}iv]hSd3 &b+&Je YHKT/3- |6$I\PJ3}>con W]^f )'_/l7??O(:9L2R?|-~ "hai)<1!FzYu,?\'9;Sjdp ^kfE4 h$owggvͬfn4TH-Qpٓ$ P958޿6l1utP,"P! 
F!@9'NY;yIע9Q[s뭩\JZ*dAr\"0(qBȭT:4 $.nsEj<g)c$pwxpV[8QȄQI)Y1\-i.W:\]3%}_#_٦۫[]n:GTRqG`ΐl4EhK,ı O A#Ymq+:^(jgvX/yD8ddJ JJS6e(U"#DE~sNP~,.ܓ8X$dV̺Q :&"9Gd1 eI*$?~3ޚVJ-pQ.-4S 8NI/%h#!4{w@FZ`MB`IJ.M$0ʯTf;6K{l&P d ɚ4G<%#7?pȞ TECQbM,$N{1PLqQfKg(|0 ;)RiQ,#8UGnW9I>#=QȦD*B9a{dDnE߳I8|#ԓ=XYS7ť0ᨔQ8yN>.끞\?Ox9#_{_צw՜5uDb!cqyJ}1Az{aި#%@z5[S\o&7)LJwL%H`lc~ٌOڴ5Ww55gmlWW fITh~ Q溜/gwBW{&j ^d#UFg(MW RhDւ)o Iu, =iqq=)F] 9y@DM0*KE)ӓyE=ҡ1ӎ;sfoIõ + M\%ĢDP^ήӊ]g[[k9dzۢsykM^׺srK`Un4̀ѭl[K*N]%NK-T$x* H7tu (ZxmqjEZS 1PKwQÿ3&H|2A:qC-$דq R]n毧qԏ.l>aon%sݛzWspy_2?4#_)~5MeFr-\nσ#lX5Q i6Z⧕R=m RݼPʣq-LR\!bHZ:Yᢶ @8eK-xQɽ7k$wԭ> LT1p8BGG@Ă sBNynA'&g -73(~\pGF 7|*n˩p} ,I߻9vБƴJaYT~97Z- Q)Te8 S(@q3M&v8hKՎV;WX]N Z R ^ېB: %*5{rl$LZ{-v1Ztc\k\ "8OD*`$S5rV)Wg >. S]RBBA+cC85'W:0"UI s:'2hU h\֒23AUՋ*uAnx孝Exx6_nD0J#PZO.q#rS8~~#j9ۥ6mD%1Ec:K"s Í)Fs\#g_Y">|rBi` yQhK!quܶ2U<==\/`&d;Xf4qR(p/LG .'J΢v<9 N*wH']rj2VJ(RsBK6RQ 1Y圍QXjU@&h!H i֪I2hJ:DHJysVޤUP;(kWI/m=-Hma4gښ;_Q2v.Y7"alL1 r0̣ $$$#ATV S Z=|nn{XZgKOhEi\)|b! Z2l,q%.'UYUA mN ĆRrpY; +jh|h$8폓7RJq!Nll&8&u62hPc&)Q', 3NPlqlP+w鷔l#4"ݞo.n%s\ynYu%e{˛6W˂ {oKuGo-|ڒ'%=asLd\ OÅ3:mQA̓eE;TJRP2EcڨښB'āQ _mm* )J̤@(R4sXM\qjXXmfUBg©DԷ1wE]C۲p}p34tCgݗV|aĎQ، $ +($ pQ@E{@_>i -YsVâ@ʞO$$dKnǬS2n >%r^Q1͎uQEgGtZYp*<7Q[ #xp_iL:)= T7x(m|%F92CEG bM,jcjṄTTblYRO]u͏uQTFDG=">jbҖ {O3,|28 `%2ek!e`cD]цd@™BM{-%SC!&rD>/u֚%⢮r{\|lgYdDyqLPKwTF >fv`Ȣsaٱ.xfgin|K;q î#7q'x?J-ܥ`$a;ĵlW҃Nm{z0Iu>=LZ+;W$V;W$W$W(f|Wo%6;W$+ \m;\=\AFvPgOg\H\%vHZ Ii \ԋV6+XK3pEIٶI9cہ+kpv ; \I {zpVi8s69Tsx*7ڮG3')訇S4:L)ydt~_;քy/gp1~(4^2g\Y30j36;حi40W| ^P W.NA Gr08z U^3+uzj}mvrV-ޟ$aupiW.L$i'IigǷgpVqvW,3z wH\vHZ+HJpJH+fɈR&qA_hZVbE 5N Ln!UeL(l3%`]-=)U) k!ɱT;Q5qV=3E})(-cn:OvkΙP,y4=% .GZ8 " Jqo]:EatFd`0M12UԳg !khknpO]"KߖֶXW>Ʌi)c ^ j9sƼe Y4"*As2bfoM,$՜$<d\j/rd 9:}C0kdfg a.$, *rˌ R%vA8zT\}*ٴЂPjba3rN/eɓ޿fl *f(NꞥAs#kNӿn}trɗU[8n01}DևACcJku)S+'|4ڼ ȁEu2L^<5kfέ]>2T5+fl/.OM78 x<9y{ѱ<>^nmT΅od2Lo1 GFv~f2Ѓ6#sjq{I'VQK,yBa֗jU:d ~y?2?@1|(qN2zX7܉݋SO?|G7 {o?} :"Xn&@M'4j5MVhZ*]^MwvIoHXn&S:l3G?A|/u+(6.ig?NxZ ֣2`VO~TxTG%'g=/vAty=T;DըI9D\k6)2sד^0]=MF7^L[^\T>P^hj$ _mBy*?KIZDV)+?שr3ëm)MQmNUBPw=yctg˽v vuvЪ8&g{jL^Jq7M>1aSJTpq(ChKDMVx~s*8s&ݷO(nhp{:h?y$ruou5:Xu/\TkJΦj^4gy=)kQ*z2 VL3=IZi6OARrOSwH̳i4ʆֲ"n]'4ykobm}xNCrlܳq2PG$Kq$m,+UHyDAxkAQT?Fg{w! e_?|/QiP'x8t}^~Ku鴛Un{eu9-VT]lFDmX%},& NKp{V6HmQmOZ'%\ѠyrŦ,hPĕPV{CzUhr9kUe6PpR!8(,%g %q_w#K&]0VgEG!{(o|ٝ(@1n2dɗ*(l&8&5*/!S3I>)`ALWWfqlP+w鷔X[%O.[B4_67M_l_Ty_]V_joxyӞD.pEhkqN[)~k#ז'ja::׽Mw{[})ZR3F0F{n@ƝG=!{UDuCeG,4kQq8Y4cUp1J/hQ#G,9Jw+f51;"fATǔrojuLZLAe41#9rЪG}ct^VuH&iNQe+n{Gj-֊#t{0>Q8Áq4=j_Ha!tzbkt%FlP+ߠ9f-|ݮ|Ŵj j>,SLΒ{ jPqeI(0RIRw+]hR*$iJ/.P%l=wڐF&^K=K͆~!\9ẁ&gv(}3|꥟&4M9uw1TbG?Es:Qj h4艷jě3mt );P{Em;s;C;h01xV筓&IJJ'0! 
1Y&:Tb'/,Uu@/#Y(i*Cʒ[f+ 3N Ҙ,bp`8w mN<|\z-Iw"ks璐%d+t Uΐr:˓3G\!0*mB!ゲcU%!F0)ȣ`e@8Nއ)ݶA.Kxn|1@djDi6&J[(][?hǚOw1lt|E@>k}tr@w-=v}"j`L/h_5Xm eFs& }JNDOKY:GN۔:2R$ MNG yΨ"g.fuU:q6Ms-qtv;'sh IQQ-Q']Hs&(XV9kd}69^@+oפG+u߷u!f9lJ ŰvwX{W\='Ϩuna]5\zXizk週a`4aw#>7$4kF'jPIOޭˬR_qۇm=og6B; ٧9w~<^RRiٜ &ܪ$r!&4?s҈§!bNnXp\ectrkcPGU@.{zA"7FDRXZĹ]Y/@',[{ճ;b`;؍+zdDRc1?HIdfK)7 l\PQ ׺Ǒfs,_@:(9D4sd scJfGq!W5*U=n>nn.9{V/cxoo|<-ϻAߏ+]ޟ5EƩGY73w` Fh f'07[P*/\VE8E(sy1{'0 e>$]#tB%3o":&R_YLst}FSçUu8X.#:,=wHDG #,s.E 9CN2Xcڋ}Rdr\j =A3ԽcxƓ٧;;MOoy^Q,p"D!eh̞|,I9DN2>2>] Mad&8zb^F1Yt B g3s2IMմ}:n;tG ǡ8C u/6; H'񿇐F(뗠t>K7T*mXȌEϵR;'2%9J{mᙊZA ÌEqAz4xKh>{.9S%89ҫ( eGK/ʊKDYi9{8&%D) )JQqlt6Mk0p؄`k;h=xIҢ^To=_.AusAGE.lRGrat];Ֆu)14Z}GlGl^3Sab4g7Q<{Ўҥ(%÷ @hkJ]BxSFQw<]29ه땓+KH@ly=^_4X~Ӣ2~k|7o?s~|:}Gʛ+m${+yrŖ}Sg0[~/?wt:Yr^[?ޗٛ//_K}K̂˫i:?a *{Ndx=k>ț{W`TƯ|je+R`v>5rithZg_ib}ʃG|XK1'`o u`{zsKBdD&cB6@3OUPR*YϥV}@ʾ[ JY2^9|zAq?[ ǒWsǜ\6AJC^*gbvQ !yѫb cx=UVYvzErӓӖ>!9۪gLqC+pfO|Dy.|tn5o 9W1zl3H,@X @(dO}q iEf}xb]7UipfYa'<,ޯ^1`UGX!+Y6HSnH}BN7Z2 _t-$wNT}n?Y/^*'o5SXZz$rZ&@gl9 N?.p YVʛ Y Hϝ6 500kgWvu uGH7>ا39&[29v `׶/=R{`Y +91"GMkPGXws^iCչ; {R7@bDM֋,6QD^#r<$ek=dsæ9-NHԽ=9ʜBa"r^D!u$2BP{딀&,da69*2A92CpR3˒/; 1|W8ö^/;evXbxZǂ/>q-,ji7gU4,Z4ֽ\nb芮*^Kxoͳ^,p~[tZN3[my{W{t@׋ Yzq-7^N7h=סjҾU Te<ϲXYj5c؈ޢV]Gu4@"`ߢVޢI&DW שr.*Jzte] 9ph誢{?G (%3zt,j|[+a2v仍-|-Dp@:aD&'`߾{5_..M&|0=atc'RhqjIFDR Wh-w(a*CӦ1|x`grt0?]= F]Е9ծ眬]4pɎ*Z*JKzt% uy:]Uݺ}rR8un;1Y:6r-|W[Y#WZXJ$'1 K$a}~\ o~'#κܳC)M"5ٴ<]onKn%h`{Zn4T׼\t:_9+$z#U2Yy9"sMP5E("`}/?˽1Xir!:mı F/RӍ4!_RDQ59'lW];%x ECثy-iyu1\9GIj2Iͯ9^]Ϗߞ3lޮX٠uhe- oMRAMNѳlY0޾;WPLl_ŋ+Nu^q:@a,'ڢ >7 t`Tȱq4Q{ZhL8ϝ TKٵ'0WՏ{k'_5^>h+-gBN9?YN*t"\kxc7:s\Ӳ+1q).9Y/o?l5#w#l{G퍠}ʘ]l\lLn_[+זˍF qI~֩mn)-G*^~ȓ (\';כ/nzrLΊf]2~X>.Z?Ja%֍Ӯ{}uhw8Xn8E K|>|]d]x1mDɨ4v&NwtL&7xBr[\W>gUw ׫ZAW/WV>ʮRY"\isg|6N6n t{GM% ьUTv4c@KLXEES3_X "GDW+Uk*Z*Jzta<-|ZWegNh:qc"CT}tSOlK"2>0g6MNl쮴6\[mn Zwk?'n|o6nO=]wkh8wH틹`yyu,եxg?YOX O&*P^ Cz-Ĵ1Υ9Ih?5Io~m15a|}]!E!͓VU6۳,O|vɧ-J>0i9Ig* |v7˅ˌkדqnVp&G4ZRpP( L+Jq }CZ2iDtU񼹩phh ;]UM5^$])Ӌf6lhw(:K+g*}ڷ[57L1k/>xIp F/PdL^$,dQ:r/ Yk.N/g$j}7.9?s<?-j Yʿעxs( pay%E2Vj :(ĕǓ&b];=ߦ`>Y0!QJ %!$Eĩ>\ YOncP:D&d.`1{k/,p" hY΄%=$Z*,$CpD 4#9S,ީM-L[$Hzh - bb BjS(C)2 s%ʆʜ|f8>ВwrX!i 7!4G4CcmdSȲu%6G@LíSGy2h` ڮAiNDL&LxC(piF[bѫkJt:)H~l.5/̬>rDKK܀]@ ^i 1Z ^a9"s1JqH%o"eEk\q/=l-zpHwu(-0N jH) AԂ%I!.#LJuB9Å@$uEAb1JK,oy Itf;!iz/EL AZ3^R@vdv9JbRFvIm2S(!kiG`! - )g$\|`fZ*vZna Ǥ4 ,(y 9)<ĪAjJ<̂d c![IcJA٠*zCV$+NDIgH!Z:eQX˔AR6f5aJ%V@JP""VT  /b5C-x XE%dF D*'0ϊd$!uDR'hmq%q7$XZ BHqϋ P?M2AKUYx$N\ZH+U2%;:D͐jPol!!(g(c)(SgUM|HBB!2U0 RWBE(XyXD@0 =x֠:pR70CA`TG%^bTJk WPXdL7XeP|Ѡ`QBąք !H!DNK+"z#A9#`js`G@(^ dKineG⍐AC@8_UTBNe~d}2hEvU;2 @fT)FB]#!Hb'!һ aMvon'7 n;<3->D@Cе2 1wPTw49(P"Q1F݅Ze\ %Ȣ H Hژ j!YBh B;1!,, W!*vRm֩A5H  %- H/;-JJ"uP:2gu]n63(I@$OEVJC2~ȃ آvGyQV"I{vPfmy1 x戳@0cm9]Ԛ %Ee7h:A- si<3I j@e}c{}wG .fgbgfłƨkgl'm#ǎi;X[ϑ'y!U5>z,JY^ZzQ!adsIByۤG AC:qh:rX ynujZO@yct1O;LƕA-\B7]k!@3 1z`JBc0@C;JsVYu(NpYC%Ykp CV,hpc 1 e2 pkC{7?j |@x/aLrH/#UH7<C\CmQ;%rUH` TA*QFi H6zj;a2Ʀ@'͍0bXsyr%7Sп!7yGV0NPiQ9RIiȣ0񕪭z T=X\\͏!#6UTP%'K1 V'F znH @jȱ:ATg.! $˃L*A".V44',ރruo>Z Tt* `!/A@aQ;X0`YME VυX a D{s|Ȓx~SUROQC`PʘGI%ͅ\4/j9j 1Vjn:A[vUYX2 MFI a? Œ*@oI0JklNhF -;@gt0 fN~9sqb RBH IZa % v5$^n}Sȃ)P!9^@na@ǂ7/v~oBѢ.LTa6SyXm4nӘzQ\ ی@ gc\Y_B7{hN !y#'9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@"'9 DN rhN H뎀;,N ru>{ZQug@"'9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@"'9 4`'2=@8@wJ 4@'a AN r@"'9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@"'`@meFN  4 r jU*hN  DN r@"'9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@"'9 DN r@x;zzSMԞ\37UJqw?&!ǎ@G4.`1.\1. GKR 2. E[ ; fVB\+'A`FN]\-Zw\JO q}{EV 6oGMS.p VBν0鉖i\7F3&ʎQY%ei/9 Q`JwLJe? 
#ޏT>$SGZ}Z\uRkʼnn*m϶p WOz.чP Z PW}+tutΚ5>Zy,6xsh0P&>B] WZ+ۃ: vg+x.BWAh8W Xe++}.B8Wxqe &l } W(Wd]Z UjA p+Gӽ+|r|7^ߣZ{]JWĕgL&EɁ GM0;ZW@PnakQOjJKQi@{Lqc4'oUb,WSળ^%걸KzJK k Pa Zw\:J3!W(+x. UrC p j PIS28D\Y$S + PQ >p\JOq8}NѕkW(f]ړUgp5@\yU+XQ03bj"Cq;unz}#bl0jM1*-E?#WONi-h7ΞWT+WpԪ#2ac Pr}4p5@\ t+]K ϫw.Wk5^/޹`Rwi WF]♬;ܱf5kT,޹uv@(04eb\rIQAT<L0" X PW+TijRY2v:\\+s^sq%=I0 lpj;P%EW*Bp%'B2l֪ U:C NʌpO[gg>{Wsw\JIq7&'\`P ժ߻B] WqZ=OԦ(BL66\skp{ʭ9y$P`z AFU/L+OUv,OI7:UD8=[Dt \=깶p-W(ש\pj3}p5@\  |+=n1 WT캩4! +'|F+u.BWRyqc6#\`Pw Z+T)4jV2 >|nr}6@/Mw8Pj\Py@ lpj]AT-jkS2W(\pj+TI$SZg+W(Wg+T\JߺA1=զT-r:JZ\ރ誓`!e6BJ+T{G TZO aSYY -6ˋi /º:~ߛu?i^x̷g/h_XU>۵AjFWشamdͷ|s8aMC{< tVx9M-]mw/:_M'߽*tgvu58].Miѿ6;{ϵv~yezN[W}mj^6֜,Cl^kCv %8R٢( 8.Y$$.URW1hkc&ʨT%] |(aSRX?j>'BỂOQJm WŸ6r1F橰GRUaG,ּDBE,XH)Ri^-e/%no;߱'軿Ny AOiVTV-\;/s&-Ao!VeTd:`NwþcPi@( CJ*Ւ2R SsJd.2S1Br| Z/Pŧ{BVEǾv~-o w#Ӈzqc3϶a _w{?gZ5  4!WwItOZģTN>L>e:˚p89%E{m-'|BKZXፉ'on\otٴesPU@6@7K]G}풨%1yH$n$/D4RTqEP%HO|9br%k/[aFԇ^>6mJ*o~է?^خ \?p#XqR-6:>Dc>=1\1# l>7T$ch(Xj#/$@/iϐTKbLx-2Ub+y$/=[}|1E_+zd4ȒS䣕H~R$T,T#2^JLۤ=N,σ7RƇ >1nSUQK @2RCO A.bCUzܼ\+C2w,_mNHzu6#7itp#BeI:X a7 PEA1!":4KD^"E".^0 Y0+zd1 s(Xm'!,yEG.C$~p<_h[DsOswkRgف-_OQ9nJ=I<ib>%H{M][{NOEeFP/)#xƓkEO/y^ %Pa BfcVy<# 446Rc.cA~%OyݟވE-%dzz^H [4$e9}ZCD )$9 b\pLYkxv[ᐍQTٝ t'TBgno/]3S%J[GaHB DMZRM /⺃_qÁ;r|r2BM4*GKP49& Z`P4[ OҝVswlr#Nx`,8z~8;a]K)s5[;e}ɔ(U@}@UҜeIN?u4!d0NO(qR+Mh҂u͖R]M]3^oCLWN^dM<`v&N?tWih l8ZcAwɯg!UVN_' GCn+_.tg. _PIrĨ:GTڷ׹l߽>9Of _wdЭ'/>'/lG(o]D*Xisd &kG%/6)qo]h94sMjO u稬F ՓdXWYB#zu,2^1x >x9|n~! ꂲж*5`GSV'o 6x yvMSOTMߦS/ڌ:Y 1G`',ׂo^tuzM=VeW\8Qevl}7rve=g?.OW{oE 0n^ V:7ޯW[޷i*N?ߕXhx>*Rh#Ac sLx0knu #q[JՒ/_Awl`Mma€cCXՓ7nT92j $+'ͤN(?C];w;ҷW;V:^v`5eDI&4mlB&9ƒU5(DrCzىt ̈́RH#pg5>seLΡ}OS0$@a/{1rDsiz0sX@ʔ!4r Տpd5zwKb-GfDu;YÐLp(5FY#qo ȎAic"ӉB^x\ҥO.F}jz5,;gS"F%u$$ TI$FOE@%Du=Y怅CLLsAiC6,yk}N5LE97 dH(f@U{`BͩBiQliAۡGZ PGlp'rI&1|ȃA; ^ 5NɎuU^}xrr"ޏv9K /5'W"Xqwl= $>\8è{}J-q|QS&s))u&RZI]#g}{zu^n]]xK Y?QhkVqh{m}F]iy>,u8,mZ6t~:\V BnH~gv9M{6rR':zg9j\ H\ Z=`C(TC.fy lZYOt Gc=odŴX*" wZKu mw <`Ѻ$QSvEYS$xUN !7Zv.eclIB8wP IZ{]. LJG#nuS Rz/6xs#9&kr"9֢ k`Ԟi#>7/TB;YJY*`qeEHZWDT.j[9 Xo= ZNګ־Yy+|R> LT1p8B "%{ xقϑ:k֔z.hCL$]TwItOZCdiW *F1s2N^bђrG[jKetᨬ.`_Dw*:z" Lkal:\Jd!*噪L']eu #"*T2t.:s }@1TLD4ǎ ^ېsuKTHkT/#xBPhe&PV;6ZtrָDph&4$OMblsY-_e8;茢^gt@6i{lNͦ'= Ɔ]6bz{iU8|? Qh)Y( 4łHY8ii, j4SϞLh.ҠE2i 9.qBV ֑Y%q1 vG> `\iP~g)5rG{J^"sF͓`Edi6ZRp ّQϽJ 2D8rWb|Z UwN 9)%x4SRS5W ҊR-2B\dP 8ˏMZqp(Oͬ,%pHν"td1 4eI*$˿O: .A3V26M\B_q]H Iń~;݃v^?i5W~/|w9?[|j8C6Z̹<Yf~k$P?]kW\xjGiĦ4ߘߺ+MɰXA6&^U$4Ԍ4A'GW=Nsx>%sVn#_OuMnʫU3O K>1j w5 ~AO`LA1RA o͗ A^Auӯ}x?|w߽~2՟_{w <N6I`lo OCI竟Hڶ4L9k3׋teYސGF[haճ 󷃟.?7gz[f'2A\><a_Ui+@85B XŠ|cbn[أ ~㤔RE`jeq!<(IYЯ9K4PJm?Nz %K70PDD(83:q#YyBq_yfI"^~]uNiNtͥNw]6\gwaV[wuċC~ xc4v`R %O/FmdzQK%j)` s2Y&sO>V$qu|r"-d!vo.bvizlrk4Mq,s{Ojkd04s^}S7exi|Aw!?i`61cӨHz+lUL9֍8ծ7dR^ٺIdzNvl>8q)+ǢPeń G:{rp(-S3bp> b26JDf$Uz!s3v] + tu߈sK_YvM NA>jwMRF"a>CbJnL~\&۪թSr))!ЦaoLb:S# kY4yF"KVtdo(~gg:$Ek"6C2>()DGiC2gGpݞ?Ae-*hp]R-& Ixh$q@kMLh_[rof)~8vuh[Un=Ƽ^ٰz+Omŵ{ ٲjew\/CtT~@-\e)xEЍN4]B9 %Y6Z%2:ǘ'F:L딎zhrNR)\HMOlqT3vd!'-,|Tah[RsϮfaPhi4|zgOLIо`e`h6(A_VG!L+IQR0qJ%Q3u1Wapr+IЦOl?ǵ!fWZؕڴ]j/von`qp%2`aw֠ uRuQEɃ98Vy.@,C*`xHP,1 OPcT'\K"tOp¤SJ @ ^J᳋Bk婑j޻Nǐ`4etB biw'l) Dsͼ Ezai䘒1D#dH"-[=J v0\u4/ɸXk_[x‪82+ OSRe"!m>~PʘgB ><_ȗף[BaBut{fbi/ Fi'~31v&WYUYsň.{1/6JvҬJY$ЭdȕV^z[e&<5:Jjޗ>qUZ[w^ R p8L)NIqq1 ﲐWw[BSxACllH93(3#jU~Fp%-W(WRpj:PJK*W R^ Pn9뭨뭨kz\= epM9(תRpjF_bv0XpA:"`*}j;>ҾZs f۩ڑvt]r;p};+k}& U2qAbw&v9к&eLRbl`] @7gh0ە>gUk-'VY|g [IACl-fr)ejU~}Cl.պ \`D1B\+TΓTJ)5-W Xrpr9-WVҮ U*q%+,\i W(W3D뭨kGz\= $p" bpj U9(qu<+Y9oQ(f *u\J]#u2)irP 2vTd#ĕ%Mv*20~''FSmb -- %B3(A05svޜ9C7N>vjoR94[ڵp*W(WRpj-:@&);=v+c]CNulZ}]_|q$tpqzRC9Vߟ. 
sS7f*FKouy- "T6܅ ՆR6YniVwMzw5Ur_6=hpvp-e},5_^uZ}5VV-;EZ_{ײvv5n|==C!!uFzZ?tWx*YYbb.T ?ٙcRz&8gDI[8zhP2+ qјro6{|!5~EvgйLSyj-%J`ITB3JpMVdl)I%e'ѢkAr^zu֌%)m]3BJᨨf* Kb9Xb>Lf cSJD34f3NYVIʔE8_;=-Ho=L4Y!)!d*ҡaӮ/4)hR!TT24qYet,yrh.h5Y`^Eu1A@~/1_7Yxc*lY(O`ɐ>% VW2X IϪ,U%X3FDE*Qh:xѬɵ STBfu}dsa4n );Qƪ)L>b##+$V 3_G H"N (-r VؔjH) AJM!!S`6ՖLpRTD@ $X'i5/f$^f!)фŠŦ] Y 53,HX& Ht=+l )D!; D{%xs"$vTJD|[(! @`9K-OnEAE E'+9+0O=4 /5h;U5 <(EJ<Īɖޕ2A[4LEw&#%RI9n,Y{yD)J6N BRBA6cueRsORQvϪ J[I\l&k LcS[ D:-dJ9FdYl@ $DOnwiXs6jE},|:omBi!:[g=a/nRY䤡zAQBu"wi$% !pB &}fE< Mg!_\7r˚C|}O׵*hږYnyy m|ka&a5BW /MpdTYd:Prt9| \zU sc Jo,rk/,J脸PԒ"4I"eiyU,C4VLta,9Ѽ<{*EK*BzP:G͐ B@8ͪd!*T?y* l+ɒ3.$S`U0Ov?/Wyw|;oUD|ҷK*a%VXD(;Kb f R;ȋY B-r!P-fH Hޘ jyJΚ%Lň Q y.!z b@ l+ jw ,u惙U3`0bduCJ&qH4bl,'r(Zsl<M;ʾB:kϢ;k4Q8T52˱ o JP)BM~뭈 KUm,e Y IX@jT_۴ײg *&!Sj1=6MumWopӳ@P߀boӮWzù}?Gh0qfQ:Z 6Q\*)]Q ENiVnY{Z(ΣDjAKDm1& P."z~3=&2#BväDy XmڔRA6G=#ʍZ+ hgr9V32TAJ`PT YڀD~lC֠7bt6ـ 'V$b%*cIB\$\a~{?"wQ^0b zYQcԔ*Q f:9 T,+~x.]ڈr!cS;SvV댙M+~Ҡƚ5A*Ei J>{jgR &C@0 >Y tStrgAVA%ob JHN!@6(=< V@P0+EXQI&Ezʨ}!1?MД nF5>dN֞ JOQ!,6T*`$In5 a3;kSh\.CC,2c4&J9MH8DiBN 踒#f !Kk\F9oj#BR!(x)XowL0H5v[Fomu7VVqoX:)'fH"mP]%0oEqŋRhCB /|wrX|o[I`X|u rmWM?V?-M:y`@o7z\_lz+ټ^={&;+D~ WWX+m.^̟-_nChzܟn?Njsqlr0C#*yF*P։c:ŦsM{<'0wu? 9'@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; uTFqut4Z;Jz0 OsKW{na6-w<ٳ=zyBvv!̙gB/x_-.W[!} 2R} HB-Z16(5| PAZ# Plb(6@ Plb(6@ Plb(6@ Plb(6@ Plb(6@ PlbkR P 9~Ra(Q (lZlz:B Xc N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@zN Vt1?vc^]]n𿪛Kg(s9ޣ߼6ˎ>1Rߕ>TIm,L CS}mTr4ʖ@Nپ3&u(;X]A#=ddB7Ébƒ␂QCYfU4^Ώ;Q )~H2g0ޫhj~T^#*;`㏦p8@ ڑJsX誣5UGw)ӡ+rck~phn`tN+E?R8FZ7 {w.|uuQY 5o4~d7 ^rH^V\D'pyF_/Qt8eP:^!m|/']ת7=;\ɱ[nxW_)?dDST4KUoܱ7Nn^7~=~3kvrbv=lŧ ߲\Fkk~O&OQYOCvU{*):q+8']% Jp5;qh&mW %aG *VlU鹈+ hJ(kv'q%0ZLW wCˏmi.Zt'q%qJ=rF "F\%l|W hvP*܉P\), ӳW 8UB{,-Rw;ߣHYF},D\M`iLWsӀbv1P.xǡ;zc"eg$`u>*=u^h1mW %՝ALmIW 8Ҽl g-lvխ̖g ְ4!Ciszvsd%F7Ȋwi1Y^ʄе327>oëj^,-2yX$ EoTbA(耼GHLEB"B>7%',uu͋_.ZE?y1%@ip}̯{fvx +A@MYau7/y_o&ϟ |G3Q◑(2+ϔeT0g~5ېlf8|Y ,ywӏ_7aQ\ e0+pr|G@ 4"g j p{ORκb޾,yKN(ԧ@mYqA6_^r$Z!2yH"QoՌqY1Ű QVs9fA㍹K Ky1 C6BM[v2Ϯ ᢏu+¥&LI8FҎY೔ | \KLR5lL'"K?_ 525XY!K@KP?IiZ;[~3f4s|?ʕ"]Cѡ`҃+3^й%$=_E+Wz*U~Zr{}H|*Z~[6A T cDp0YAbMNDgґ* *ƨrRg0Th@KVE#FyR;mT }'H>YRF5Yw>Yi"@Q4HG O+AXaq#,ǻrɒc݆]&3MB=毯v&X .0^)j=ly=-?0XLGM3)G&ţbR@H,c&pKmiT׈ Y` yJ52Uc9q58wvtջb6a([rWiKiSN5\ʯty}'mu'}V q0g3#ع4¥fax7N`~w̬D{IT*4'3`(Lp_KF/_z|3Y"4`Ļz|#ݯ?0߅y ¼y/w HJyTFX*%o8(-8{`&*\®Z(Q@ٶ;g 1>BSdn02pWƆ#~0EF3f` EiC{0|r^[;:w%ՙNomwaVɹw/훆?6m%_Xn {`#Rlފm޼>fUť¶௚zV#6׭6/u"MKjI ],)eȽ-992kB2e"ØgVi9E CmniDv$c%%:Q(zb+IJGas)FxiĔKL",4{0b}D2_+냉KM1ݱL #(H8H7KwIָl2vJVtp[x켱ToVgf~]ԩyLӇzCSg<*S1fL6ĊGMM9:$71f9<1fc&PfTet`E@5XJBgpcpko`ߢlW_oSNvEgYƲ0,w`;r:;;kKy^v\s#9̩12Zlf,8"`ur5X3(܂m$Xݜ/JmRxF5?!"P*m'E^ݲԋ}ms9x2Q3pͱ;bͤAQl$+0&L"NQKy")T'"/l N0@08CxX~b'%Bk0xD7cQ130Ш`E*L<)0  yb3 5}18 M@H;qӐ@w;Ds%rj<Q 9.?Q f鲶 yO q1S137"Gw3 J a'iFk)EO'y= H &Owd#F:-<| )$9Xc'y@ݬolַ&M|GCRRԴe7!} !&pY! ((ώbDY_?iӎWGӕHwř/{%.U.v0;OaI-}2 zoO؍ٯ__twӗ=7A'8 ~_rde%??C! 
HcI^]x!|.~tOcg (:]럽4p}_SCG8)Z~A@yhQco{?f7 Z87㢮ƾGDTa6B6" U?k%D񷽿 F .hnOÇ0t1yEDWRCY*8Tjh[m-t|^$n]6vWԩڱ(`y_:zH=\];]퍗) _;}iw3)a3/o_ #{*(M8q\A s"^u,6)pP}?]|3 <0Ǔ a|]9wկ l_AZu|(9}9.79K"Nܯqi۫|m7\ȘZpkki~G.YUGni[%@e#C*oѪ~A;ݵm bP6yKilRP롹>޼Ӭf'Mx7:S7eU+'蒗]u[h` ڴFp>ɼ80(b,BZL08gۋjh묁@;]FAc̉51(Biz; <^/IΜ[Aw`Psm mgœȄT |zw`Ŏcl@Fn0N&ҿW ]x?%?s92( K_ɣR*E m×\}Q/ rgrJ=C}sb薗eˋaeChWW!_1{*U۾@5PUio;hV>|꟯8i֋ R`4LlT=f oA.O{_z UjNKz^|)$2La3kLƙІxs-F(D|GuFp֞[ek1cO mETҲh`Q\@XQ]{oG*;`GG ; 8gxHl+W=CRLJ4ՒHkD5=]U#,$0bխ@}ʊlEI5-:I%Phh,BjlXHӝXȕKʯP2 )i1f-WqK;rG|kOttܨh"SN%A\L(xAK 3/N!sG9"UL/u:̲NnhE,u $Q20V&ˡDD!pn##FHZݛ;AܷchQP.fڧfM󞳈۔2xZh8Cb2  ~?^nvZR(a5g) R .Iaف1tKNNIofL\&l*3726V,Fe![f*%2B @e0luPj+~seK(f?>Ó92 `Ҥ&d_Y]Eg.}Y&1ױ7IA@>s;)%]?$9HLIɒBA@\59vң;"i]r:vЮ*EH"}8Zr Ic&k8мfT`kJ:Uuv*ښ*xGLZjE6I}s h;qbهĽsZ-2L;Ogź:Pr6nU=z"Hez0H5<([W Z G54r&k|XRRKQ[Er?h{uoVg&+J&͘Lfɒ3P9h8h/pPoд'y0lm:HԡJP-4Kyr#gO/HܞMJ'iIe*=:Rp^²鶆' (Q9Hajk8c U"eBJ&8aˉpÞ(n{Dh r]*K[n+0TUȭ V99aU@t C>0 jTe IV.p؊]:}{HJ,H/ ?63p?Quae'«/Wq쁇kWv"Y1$ʙӟ{`|OTx9wj)ZRೋ`d3޻DB zB[* ,#=4.jgPC,T:J*h:= S2Fݸs5~\j̣#-ȦH1 .&<HLk4^gdC;j !H)) N10!%Z)"X+|DUUȖ U xf,ULTX.}/H xP 0+e)"7ΣhTnWZ@?s0yR.ϴT6@Һ@@Z1{'47Fe$u9;o^{~ #VEjgP|w pYv)yRrbNͧN3$ll0`4wY \~wjӃ ) &H/#Z<".&Q9Ybr,Kr[+"Ku()#jJ/AUFTqpZhd:9w#K5 D7uK`){X~iY_5_=|")P ;N06]i/5Frgj@Iנ,e4^ Q󌗔vέh: Ei1cd.2Da`3 ʊ_"+50NKI,,ޒA'K4$.2M2G2kCPymЕQg5r6g*wJ_;{?>~O%M"(=f#:=W C2*y|4n0Ԝ8 Ӏ*[I+^~ɉCOLb$aR*zc sjs7ID\pv+J1]G%/twt>lٻ z| ӚΗꇭ8/ZJ7pK.k.d`y.[FI*(xT7Q1hydCUYrA'vabF/:6gp&E͵t+pTmX BajRJVY^>,\PT $u'˦m{s}z9 Փ]и<ADeK I`ʗ huEIsVwÒLP= Yb&hM&ێY,0ŹC,[{9wKl?%sWvձuyVPqC OZ;O2dpڽ,#YiX)r +:dHĂIؠ 89,Qsa5rQ_'Ò9D>DD%^,8mseh,0N)0EeI %"X]o'cD1ke4 r3>` 02R 0AP;E9wKlj>'8mf̵SZd[+E^.޶3Cfvl`ЀrĈe}gQ.(dV@(*^.>\yXmul+meyeaI$(^Ni#7j{؅QюяOHr*BC]FB8wj{뫋OBsSYKQ^00j%MĮ]X*+CX4e^2%!EG=E9`P̶l=ڭlU.%5NpBɼ4Tw44ݱiicձcv{}|/]퇖C({]5Mt3,1EW .mUB)qGW/btb>^/*=/ɛ$#߃UIY ƣPWH0žaW_:mQ.Zт7_ _,ղ" MyS\:D̫li0_/fXYХ//~{;}AvւO;Ƴ9ie"O9p;1kj ąsc̱|> ACq}#.\Y9)0ޚjIsB-~tG(ЃS9]w;RJճ xR3Ey:PDcբ'mZucg-̑"ka*E 0eЊ&Z"ڞ`[bZ|=CIUb@j݄/< D5t;-f?eHtt,tVF]%#JѩHWq &-JIpn ]ZAP*IWi"Q W ˦UBIHW c%ڤnQ6[Y[*'wtJcn.aGP1Ңغkv[D @WtB(uG MO~ t`}^p)ǥ#~(v ۃXGWz' h>]%G=CW 8Cٴ_*~4p6g|d\0=g^ZpʝU3wb?|>@ fb?@/ȄZ VnbA>wM?OFJu痱/]ӕ6l_7",7$PMs/Nji%wEAa}TP㦃|?9s,Z*CjƢw)^c[$2\J@.^XW)܇'c7?aP =0>QH*ۄ A! ¨U1kQRk^jF#IF|?*kdl6V ?X^\]q'Y|t1gx ? 2]/rN~ 0/̪Vz۹L׆^Uӭl'ɖnUYSqYcNK8syflHNPFfYEpα?0`*l.n3Ȗte:R"tstL*"Q* J3K@4$p p(*/Oa:`yM  JR&# =fb0085vb~fY]RjQuKb)]'0o$ LRaz9?؇{RH0BU +$c S>;9 jT d+*,F/΋q0.FOЍ {Tχ߫XCd#03 }d |#c8;l|UDiB \{4a Y>pM OчY՝"S Q{?W. ,rA4- v 0ǿ뻳7?&|otͻ3L5:70&6 |UGպUTV/_.:ޯ7phYDŗ#`pt|9Bمe0ig7s&xŃ$]|W0/9,@]Tp q2`KG)N~YcДc#{I $gI!bNc,x FðE*& D\HI\OzÊbIALuly{X&q|j} &|fY68o7k{P Cǁm3K,@TFaf^# !z.uZufahE&7PM܊5# 0v&OUy{Ol#s:hmGʻc0%KiQ?A5FY]?{;a쒍2~LpUhRouP\J$^U= TMH*eUD?x J C?Oќ@|ƋRAl4?>tR!b=!hkpR?-,Z:gE6Ѧܤ;; Kw 3wAp2 4LpmK!U ]K43 8 ]N.ݞo`oߦO6$#C޽Yt;g9rT<·&cw:ˮl1*?l>nVvǼc4dd? FlJa^iGjd~sVk/;^s%8q*ՍwAUw{EX;IڍC_6dqҧ>l%~ C%<lo~}F[}S獯cKwwl`~+01pc@Ofl;;#Ż`[`{*W[oo-suv7۞fKV90NSiTɧT 9Wjp>CKQ kT V0biCڦGm󙻬7VMf*)5캈ib6/ &?pvAzmfeӜ%gӸF1fcQE vSR+ NrC Z8ģ'cv2d@^S@K$Qr d251E6hRSǬ" DAĹ[R_9͏"b@ZXD"$2㋅Ozgs0WdS, յvcLI:)emD.8t=Ld'CFCs?"yU{E-^:k͒⢮r ꐙ%H'3`]te~gQ(CiV@ZT.fcVG ]jؐ<}j{Gq ~ĮK7q'@ޏ(9IE`-~>IEZk^?L ^a$)V|o!'w^m̿|S&8qXmCY6^ l4gTE#R28mS2JHI"er*p K҈2!U \ dC`RfGJBḋH <7+H1HQjVcWM.a|ֳ(f~NэfucٗiK.y]m0ojB|Leu2dy Y0xKA'*yKXuilZorO ^5-Zю;s s 8g lV̦s,:k\ck>ďCr$kP/Iv :/@Ɖubb`+*DTFkt)Iz!&Z9Ne,1VO,qƽ+mޓ}f? 
Ew}],kR{l;H@pY]#bӬYW_s,!r<9 ,1xEC&l0.(>*KK޷\7/' WF9}Y1/w :gine⮆.}\1}M9W1zl3H,@X @(TT;*/ࡨyō4A2{/|p'<{cUy ַqtrNbGbՇuYly^FnrD'1; )ST <I|:|LBjoځ8I,f?V=?tVynUR` ,6 nN+y{w$?GRUWhfe<*̂X* ˞Ɉ;-CKδ)ff)}Cmf x)K,yRXp41e.y#8A BG.K36 uH1͜gD *賭]ɢ8] }-zm^{k3s&ѳfS57˂ O'퍢M&:Mt8DPbbyj3T>X\dNl ńUBYRh2F7lW` Q`7ZyT/BU3e?~W2b s1Ⅾ|6?AzI &SuZ(B=;ѰLzbC]y]~'vYgIqdݽT"Oj0eŖHLRt&*o?:UD>>2=!m8msvӛ=ӣ;\?_*@wos Gh؞K/&f򥂚#,o|'utH_\!þj~mHqj9%K} 1 rg&RV.y"3ʀ^)A:p\%!}fjwӠ/zS+7|gm]x%пrhYZ|sEu]%1oE۬SZ/e.v}2۟f!ln{zj|]*z^k}>g-n?.s~*זg=w7/Sڨl]NWMw=@ymyA2[(+q42K"1}/Qz( eTroq#Uv8McC 5:Q#AzMȬRaM݁9`+J{}"]~3V1'>Z\by>SEl1@VG<71 id뤑0^@.ٺ6|L麶^ %@PY+phVIgԺr[q3}-A,j[X;*0USMQAӡuPPLd !H3Rj pBqxiT R!k' gNnPݎ9zBwBltf\OϗDrIiٜ &ܪ$&o dg`NCb"$eU&2N'ǹv9& ܣ heOD/ HTH*Wp5q_wY/`sݼ)ѮHqۑݜbyƸW @8\Bw?5ߴ1x5"܀`qAF1b6^GRޥ^zuFɾ- c\ID=GFXN1C9N͎B&cZ2nn;~XMv{?R*gv>>wؙ;0bs#rD`t-F(UTɳhEfD(sf`x>`(CAD Z0B'TB8XN~A-Қ@DBiG'=BJ,H;>6-A-,Q*D=3dv)brr^'ElIIQ34C=OO|ly^(A\hbqGM 2{yuH!(^zFq@Ze%iLʼp{t[qr`8U/,I94a"Ph1zLb1q!`Y&ɢV}UL=ZbZlX5 'ԃnpU`w!+K5@,.Fø Jk!v'2%9̒LxVtSCt?:{chy$eϏ7n/Nϭ__gw_i)#ܼ֗.ѧYMޭoߌӒoF]_M(xj%coF JMǓE}"IPOx;>.~c upE@,}GOfz+HUmj h(Q\/Em}6]FMZfq]~+[%ЗÿԶ"z?b<9_X!N')yt6)]KGWRݻoͨErm9XRe˧eޯцKpu /֯vl]m/ekJ `aT*~6okxSޭE_b_d H-GӳNO{\{:WnEm~qb_yA'6Z$Oh<;AAbq dUXu0\>NZͭή=6R4wOOC+w[IȖQћ<l1꾩jm[?k_po81}fg?s#;bٴb|=H2Mc[WN瑮&Т"Pp"uIV6s8R oXI9 |ͳ[JX{*j_wQNoa2UZ&Zc-|__m{ۺbb1|s$@PˁJS !`:T>fCV$7Nnie7a;EmK涛uC1HK=;zM=}=󷻓3V\tK*Y&j -v;n7EZޝX_^ܟ>nV7``nN0Q;Skקl]B(p.{T\(ZTVNQ =[Y_` #a~:eA:km,*WEu̜0#ts {#w7=u/[a\j{y5l{tjyjB_A>E8VJuN1/Cd=~0w^m?oB|t:KBNF H@ X7ԶoSνluf?%CELR>Uu` ?,IփThYI&Q0ݦ}΄EYaeASCeI,U)9S.emN{LP1̜_Еcߚv'}t|tT!M IW1 M@NK&` AC2\AD$b5WQ ԍe0c;jgu7gLK?/FO??ԫ󖟲:k5+YFFzgygNg6(Y+'g[pgu׾fW=;PT9"{8`XX'<2*2YCt0@ޱ(IubP ɚRZ%TT =9hѤbul)MN$IC/ R]#cg)W ;{bc^pcQ}w_md1)7?d4~2}_9bǨlf e@& Õ-u,(C>xr%NuXVI 5%8i2 $\2VhlS]s3b4mI̾vgc_Ԗݢ~j mEMha.yiYE;>098 ({{5Zd|d VQgX82FR$"l;3f<\|6Nc_/"Q 8 Q,%kOgs>N"[2 R4b DN1!E+dNT8R sRHbzLjؙ97#⇓Mʹz쪗싋1.\g<",3"5O4΀ Jr]B1H|6 r`DfQxѬtj6\ڢCSz;>ĨpZKDE{r>KJKYÝv+5.k,6$iIsؾN0H +YrUW;#?}1MZ٬rꡕm,]:,ja%xi5v:w: 쯂V{q\Eѻb,[uZ;/!-;܃^@J7~R 3AW0& Jw2>] sSSj%6O\Qެ]_:<,OCMfuH'y::DŽ揼>cmMlLLg-jWwBݧ >xmZ{ZS[usog)?:Y7w:7]z+*70gv;{$DHS=6Wk֐;\"Y3xSdDwsqXc ˻5{5Ի5Gkm/[#\.*`6śZ}S>{ Eg)F-0 OBJ*`bY# `\!eόxSTr<&AQX|wvfΎkς :}3>ǐq74_+P2~.9atVfg(Q&.TJ6{F"Z$a]Vc$@2~|}"0=u'fh5B[HH}[뢍lս[ˍ*4nx+ e}1hЩOt;\||vS!}uK1Jޛ GfARNA.28 mX{Zêst}Šԧ=?&p=s<=__wLbx?hjs9dqtP%UUMD4RZB,0Zq@{AڗWx1"TQ)gɱw) _'ѿiHO`{7-C>[KYNϙǓ]u2{]oגQ6hfE|Y=b y0g}Ǐ:i:G;lL$WO})KAC ^ߙ{~ᬗE?  dԭHQnn6/Fvėh&0rP*JX_KhMEJB% !$H{>;uNWtCR p~ɢwgU?og_?/s%sx祟R8X{Fi}gU^Ӵ?qb빛,,F?o|8T[/W> \f?:?gOVs'~^?sKg}EZ|"<)%FZ|E67),#U+P_?SJ-gKυW߅f ! 
o>lS㱶~2o WMo=w7g) l.x}!5}-1o ϾY+2)B&uC(+n@y!ՠL d޸0p\jgX]O0RSmSq$El"{#ƽ1f0z}"!o_4Ɂ8]};~_?=t5-v\se>Вwc*s~SZBQew|;4VsFZ/aC% ;&Bb&jzy@ zc*F2Q%|2ݒjsxœN9c?цwlyAG d"!=5}0,*yq`;.QHX* yaZq+jBIE tt <]] r1ĚtN,:O-:XςU V^Z.^0JAƦŁCoW/WPp!}oA ;Ip1.m |VpoVB6Zqeߗ_;fVC_wPnV3ɰ?*t*p2,ղb/&\4$՘ &Yp0vsO$uKunU:M7r-oyX)C>g$܊se"0d!p6on &o|#Zdm}3ٚjmt(ȑHj (1"ꍴ12## F٪\okHXG"\JlB Oc*5 Z^n8zo\zP!Fp9Mfq !r(b&P:&Y mf1*00JMBBX1J9Q5r Gw}wX"_8P#2roE>DKiV"n&`Hm, Yv,CG?!&Ls1a$:crTkQ<*( h<2fw0̱ԖFթi y)mPZ[ tmM矇55 ELwse滓pehƃivF*>j)%7`(EsfA#9k2݅CmQ̚n?ܤ\r[r V M`'_3)l" h*=_H#"9V7Bz|T=>#v^L"?PX$ sE9}Q`aU*YNW <%ad }!m8lD$D8BE4F.;l^1@2(l 2LwZPI"^ˈiDk45[!-i۽9k׫]>OXrIźGB/߳ޝg&A"$X)jTUMFջ~^k} ϟPaьtur{94>\h13`hr-XERp\M|Xs+*Gm~ F ʌ0sQ2T5Vӑ,JEat/ $$dMLVQ{Ix2^v|f!`[p8?p K%֑)nVeR󨖢  x $6Zy2e`WR㔮DGRJ$D]~k㽂_U ƘJuc^5ls;eͳmok|= %F$5Z ;I/NfR;Ҭ_7]Ⱦa]8O#D%X:oT0vv efNJwII @D9hkc L0&;GKm[b@l Om?jlu` P0vY4a#A2?9HysQ XeYtPeq*m*mnKL}(xmC)ueO$R7}|7ĉ??L~Nz2Ta9n+\6څmiIb0=w5 ggWȨ uh#92 c[yf9' sYٝ>Ì(땴ݩI"FS-s\Je+,)Ib̥=ᥥS.1,nΐLNxZ{+]E)KU͡z~fha|n&㫲Z+o._zʣA{X}* QSdi%>T̛hqNc3Z`(3*h2:Z0rH5XJBSEL99Q=tP:26Eo+:rOenPkKBL&ydBy m'X9a F9;6E` &ETҲh`QҖK"1* `rkD&e\+a -4 ?,Yh&< xNK8syN3`vDfR`0#H #K.b9B`:[p!j[ŧ\u;"E Rr(TkqXG$"c e4 J,iQfu#)V!"A )H3 1x(85vb KrF2 ur?Kb)N8!xBSeLmtwDF('{ϥ`VHJKǩ`;r̝&2ɸW=D8_jQ*;vאL)gvһHrMɖ|a >X!aY5VޟT>5J05.,yFٯW DoWɣ0y"cג)E 0ûԐO# t- ˵rmWڮv,a+T=TJR"Dt"X)-dD4P˰8щʹ5ە~9%HW!]`aQ c1F1sɭ`)$Ť313}+M<[QjAT;ruS) \xĦr5ܣlS3l?(i侯¹,3kg.#3ՐHCrVBxTfs1@b5HimtDND3+lnT9L` ԡ#uH zyF8e|di16b.aSBC $ɴ"hX)Q1wZDl ha[UhNPƖUYCE,'N]Ͻȑ#|*e0,Aڛ0*ϩu-ρв:{W5L(9ÇD.rw/AjJ0>"qhU"WcWZM]\N\@qRJ z4*+ɱDR.J:E+̰vY'lR-:;;Ͳ-?5C3D#ӛϲ`\60`cxgHqvF (B=}orqŏ1V7D0axbi=OVQ,O?{Fm^ dvg`>ecI<ϟb%˶V- "YUX nnz۩h<*{6*HwW/])CSR]̆9Y=탩G?5 ԌOs>K-GC5at2JxlR5*(>.cWz[Ի5?q45^yg~{IFfdzU ShӓQ-L٤k ;ͨ|?JV9^%c>&ٝTV1*v}UVڻn+jj j<YTyFX @(Ni觳 g)b7vi-~vJ_h2Bs%1*c(.y6!vWbiއEʕD@bWO&5CO4%3w>-jIk_s"o M0u4d h%問U ;RYs휴b쀾yFڜ[@/uctwϥZyׯ6!osXX)sHO޻+*G!JiPA:eG16LpltZb*;(Y;-&꠽}`޼rgzo2bѱuPy=oXa=oX1xt 0zDOc'xz"'yH"Jxz"'xz"'xz"=ū Wb«o:}V#2h]W4iNDm x>C-{PH\ζn+Y㋮ өpf+dCRC`@0ʄDDY"%*Ph 3;;z=V*f͡SIxg2G ZHyu ;8z;Zk+Briv>XS&,X=3-eP3s-<@`шypNIԿ;/m7Ҵ!%,EAGdKdR^AkZ7F&xb.Z zC֐(4]i_kex"L!peXFv h#[OPjÛkڋ@?{k(Q'M+Oi6|Z_s.7swgݭ͓:Rɭxqx’Sp-%-CVŸd|2ZqtN`:#7xzFX5[͒ҭsK%DVش^\_/i@2?*zu=rt~bsxÓt98s+iFF-ׯK#q8M&o߾cdfy֊cZB,0zԼ{0{Iѯu_'?^̎ޭ59u_<~:ī]IՕx94~C#֒-Zi[3mfUU>4>FbY'bWmO;:sꢓm κi䧅'ً)y<(C~y/~חj҉_6/wѿ?~8wG~zK_hf)6Y&A[Mݯ4v4hZZؠi)u3rKG+aXIq׷~8 zw=f՚9Ieč .>Bb.CT)UJi]% h;Po9qc#{L=N_ I)삀p=HPPpd>;)i5ER(2e}"9>NzUOIlj(x۾\f 9ZHI#/SM4Y{K]yI&\tɾ4.vTkߚkL'؆[ n8/o3xyrSjFKX՗Mb &G ,R }Q)(_ h9<"N\9ܨ5ʇ]PDr^UJ -`ax-_Qli-H?AVt$'')܋F+OF#s$KȕPVX)A4H6)2-ǔ.} =rEf1\]CBw$w&Άw=~spw?-0|rMA/?D!'#1BWlV+q7fS(}?*L.e'3=j':*}'txv~Y,ڐk1}퉺b:8O~eo*dP_㰘WEBTg2J^AģeD{!%+6:ښc۽1K.P]LEG Τs.3v&;ҙ/L3u m .%*}]d|^rV6_ :qPhe4||fG!Ⱦ@!l "b i ZB~TvY.4W9iκݙ0`EB66Ah2vb4.ɭRfخwf~'bL;ڼ[|_{ۢ8VPqCOZ O2dp^f db[2@Qtɐ-ApLt"s>fY::s?r|X}ǶQtE{xgNA[\-E#\ƒ,920ml2MA"I䘵2}M{l 0\5ClW'jO~qZ5ƵWήd[;_K3Cfvb`ЀrĈE(睔NBiL m/?L;c% eUX tN5r#ƽƕ28{"Xi Py_.U7Grͧ;'+:[$%Ca- YY2EZ :'}˜U7NAo߃R8@ZN=wcF8kQ윓7Yn,!NFi4GKMKeVArrJrT^yPNxl ׌nGA7o[pv<kYiU\=@/{.m>=ۅr]54 ^2%!EW{^3lnzO@r.:zT+;F8 %pT WkPsoU(pLV*UAV̥@(a(>|LOwv(>^P|f[2\Z[&;dV %ǂF<˄N JiƹΩdPFPRh@eHYrcr8 Z3w{wNv&Ά%ޅ|rr2a%֐aV0_y.?Q}/" AHАq FXb<Fs\EMNH`Pg;/QWSo5۟\Mw'د /Y-_7[bWeۦXTfňѸ#XwFԕVVb>ZrO(yՆg݇d^* &|V3( " ;Wjyx@gCc9qD_MՄ7>-)Ztgǫ)i~.) ß?^bE>O} k:2..AW>jWvr:be#{vO[= |m$"f0S`ɢ vzf6Hr6F"mʒL٤4ұy֩{A%Q%:`LvA$h<C%2qAqXn_wp#<وH\+ qh\2k-w( DcNwLzA c,%{|*̎1қa+mAb5:u'ʌyV 7j@V6P1 yUqMM޼>ya(?Ӧu T`zG0ȃyJ. 
[_gk-&hbG}wAnk{φn%>x(Ⱥ)q[">5`a‘@}Oy" RųfT*q:|0&qUj:zH&|huvAUr%fd*ZtH*+݂qPl)C1$ˉ 8'`Ό iÜd"r bDjàF{tʼ >d^ȁ<},O6Df&1/T2N N Jpd3fzJr!:ɟmR0IuDK Q?5r䀚|o@aݏ6g\&YˆI3\Hy5do't%Ƨ,zJ8RiqF]<%(B}_>h(F;*m| `U~MD1R+gZ{G_1% C L>L೭ndgSeH(VQd0mG֭ OЁ]ɨԞb䝢Ԭ]'#/;oK:9T*cA#)bQXQ͜W՛Ag]\JAk!;`LBH8cDc7&FwY&X&Yt n0QSY4㔰a(=J|:3rKo$_Pj| VX+},>VOHfwQ##r'@DYڬ( ؘ T,ᨋ49'NFeܐ\X{Un.֝NrDӺ67:䡍\qV_[WH9g;wxWUw/=j/94Nj)ݗY ~fy?WrxV/iG3qn-/o}'Ȋ lK{uہ jGYK0/$"1~ɲo(ӿEDdyX&@+y96hK=6 jmv[) Ds-α6Rl?zDh0+Jڜe[͆#g|YO'~F s`5_/|ەc%1"I$m/i]S8~cLˢ٢;@vxr[ZIOφ'4GKA^ ׵?"3lk#BC*(&֓B;KkRPf3}U< pk+08j{= ؃ }+|ی }=WmmdWx^Fsnh|I{* hFEv_!ySXy^-?}5{u0}D䯗~HDpñ+vc˜Q,GuOpg}VU0S O:Xir|0(!jhÝHG9gp6jbx^"+U1VyΒ tuY=~v4یCv-oG"SC*nf_֏D>+ ge8QH_O:TSs䂌V4kr6Q5d_*w֙]pT*DL%zMxU5LV( ^BĮJ䠒&pdHW7˥e.Ud"7I%dpmtD m#a~D0);S`CJ"{Hec0O%̻CRŇl<GL UtyTS8jD^TÚs}1jI?"DJۏ s'6hc 5i.fY\ȫ\ǟ_n֭bQ+ھ_7V͉/lGk~ߚ3CysrLQgH<%yBN1E,!V9,M$XEfjꉽzaI2,xJXE3Vܩr1@Ӓ**YFT 1$PJPblDDrǐ sFeyQ)N[WJ]+qTJl^>]S *A ,Vb,t60%ު 4?Ky6lUZI߼m÷S=>] 7_4cr (v~IVQ~>Ppy{J<=iuKNgP{Bg4CIpƺS<*Ȝ6gN@ʉuOtm#63+_ƌS)qiOV$4ζ[ޚ\#jH\w|/re;N!4eC{n_TC>c%Α@Q zo;mc>Fc(nUkrHQR2dJ8zMyslPG (u@> =j(S"W8nHÁ~uU*cR !;ѣQBxKfEO I8qK-Zpo2CmZ4bv(ص8H$jtR!D>ZtNAwOTQSGeG X]ɛO̻!L P #L'h $_FF%pVLB*)˻v(Z^!\ڽ@`%s- +YHf2N)AS¢όM!)DF☤I+RBR\2 E1`6&A,H#IP$af{)E ۭWѧxn3kw45 V`Vui[aULyǺLfen%Y8{oҥ%>wK?kbV,J;k&ڠ'IwrO]FA"Cd.;Dd@Rt :92KX:4WRY ǢfOh85-A'?u+d3bc_gϴ͂݇^7Ƒ%>"$l&%&Y DvQa s" = ' !-r |:[&|k2+ט$aQܢbQ0O\A\G ۜU6@]-F@:G+U,%#G5KalN?#Erlhc>>ujBx_ơ Z!(JHd;:֛AXgs{_M\q `rM_$t|bv[Uv@PSxslv6~op4$dCAʈP]|uMTם 'u*5V<~ګ!9TN:jxQ5 1irj-LJ.x5F`@39c`J 0Pc pTWO탈%6?gd(bH+S8'*8I o11G9/}`2&7$HR)b?SXFĜ"(rc!-U i}uenK (mym?Pґ<&Vii~;.U[.9&@W#m{+k?dA1aΖy0b3s} Zp_ HXTAYyN;/ ,RԌyV;OvHfj6kdC%E%p9CbXJ:7h $ҥ>^nƋmi(-VC\J<_forE#.f-4(bۭϳ',%Cq'5e1mW^\dx~J/D0 qgzDwEeBФ'kۺn)[`+ccۼHHCiqŒψu.ψv_:QU ^%(^.U͹ݱ,Q+9=FZ)-xdH8$ PP);DW}o*[dd}29aAF|,4B̈R7ً4{Xn2}i40%㶠y:٣Z\8[ Uy8accIY.w+Lv+ HV4V49MǷo$<g؄eJlܚ.׽SǬ#ȕ`6"~(G; _IrmQWI04\3.aNͤ4  &\ g89fNLs~a0hETL3JP٥Zk`gX^㺦BJyr^O=Xp @^>xX퉛.gk<О[g~ӿ?/>Nf,W|ߗ*}uuCtI3Sp=%,by]4ńƺ(x'LBƼPi-,N !ukHxJTxi~|5L$Kޑw0pU%r}%@IR2Z

y|"pA=y+!B3Tix=&2o[nJwNgmSVj7 @h6mcA*μ5`+XwYViԆjRuwR|et~Ȩd;==-#̞$ƅi7OMv(Y`_ag\{ tb2:(Me!Qj*xkm\_ua fa,-Uz`xaI@Z3JW5wopg_ ΄`F<IF#P)D*1, 1gy ``gI +FW)f+¡E>K嫴XV3m/c9-6kI1^T#>OcA elt)qT dx{ >x83~4@؏  Zǃ_ϸv ;f-Ӛ8 ciK0cӒ2A~:;[NFrC䗟"DR˽L R&9!ld9&Yf2x2Ns׶d"CgeI:\^q$hEyܷp8wAosf"y%{]+T4|޷+eUwmA(xtv^p]IE>n7?CvNvTZER 9P!jCOU: n2G[ŧdԷjJϷ$muT75\LxU l8"wA>yRU6@'7VL17QfVۛT߂^V}_Cf돌/29“C?oBqKQBIS% jm徂$ZCZEVsk;_nw~ d߽ڮUBMK{"^X`d۾>cL{n~RiisDlNStk: +hiZx;,>la |9RM)=>I$|a*~گx5D~qbwɉ >k]h.k~1|5+$SH[]$E@٦XѭK(=D$c)qβfVs>kj:}B!zo%ShX*RÒ0S Ye]Rv '9*7lnH`% T)Cnmib񰶍#u9OI9Nj*dCF !3[ª$)AaEE-D#!Ǒhpx%o\!o&Uh"74j1E)Q=’$bk\i,oXӨi;zЅ8@ZH7 @*紙A_sԐH,'v`ܻ}SU;ǵYM U{FH`cG+NOBR=~]tr #}o-P'tnu <YNb Zw"$BD(:0Eq0"tI#Å:Ԥnx*90g3c@x Zɞe|oT{/<sLz4|C"M37\:|FAh3N pgO鹞{rrX-Q`̆Kl|9ٌ0;uCtqpZڕgQ͟%z֟M㺦'^|o3OoSg8fXg٩q4p5}֢ ;7?e?:c4yio9~=WN.5˟|lxgzo\Su޸ @Aozye/^\\`sp^F30Po_|>hɗ"BRrzC= .7,ZO\K;C@;s}WPھ?uq^쵞|v%'>˝txqcgfwm"WZthʕ{C׮i^nS~1K憎-&@Xm6 Wp{Įx.]>yk:]{g<*.0|N2=vY_w^'/~u/:o SzO/#˜߫4hOz^7|tZoFSw|_,'spΜ-x7j{6>~;G! B_F7Ѵ}.Ns3?x>.z],PP'8/(\@}}6%9O'0sNA.s8?s\p_osc nҴq?qYM?J|0yA`#Ao`^ж/% 1JI]责rWi]['؅+ KǸkx㒖ṻ5 $mp_9(sd{1=dI`rY(gVr87MW zM5Q%~{QInRv⒙+ gNJ[he5`)sJg8J&ew.3<|3W{ODeqWPZx<_xWHCd0aH\2|%c(]m]ԥZkE7I=Lz9I͌)j6&$sFSN L ʕi:Kk 0z?aQL`zv#/!Mԉ+~|W wn 0 'aų3 [I=o_z};44`~-~z‰(\jG_{无Q78:ex鼒37-{`u_.^?yuKZr8IKRSO+SESM$\a)1)g,p!Pe*ep8ԗpP4܂;Y:_䣢М?ﹾ}ΣWyZ+RC\L?vM]۸E)b;:|u+\ ]Fye80s^Zt2chHQa 'fB9X@{BE[6ԲƱ!?eC-jِRBIH!!bM)1(%8IE~!e)C%C+BUY]0jzlB롑}Z.r[MҘxLg=ps$OXxYHcK̰q_,b) 8ĉ$9)_-!j Qc:h QKZBsB$ci1 lJ E(PXcӉLrPC%D+$DUY],K>h%DKZBT85'B2 P ̇I8mz %*DP=y h$+, /GGDm@Flxzh0¼-99`&I&I8 J-0 *UCy1b[BlR+FHIL9/{bާmP˅Z.s.UR0!_NCilDDHRS&Ja}$pѽ C8"OVEYRA[*jlwYYXf]$}'}Jߺe,A1X3]En&)`B&)`r *M r+xK@ٖnd]r.5  Hsfb7;Lv>\~k*I I7X'NIV u E*BȂaUY?6TK?6^C?RV.mԥЮԠ)%03Ve@Q"KcAu"S`:65E E v'PQe+G݆jGƿ\?8*oGGM8}B*Ԟw9@]˥bb]`Iq [sUAiXk&%#$ĒoW8E\ݺlux `,jRiXu>4*AI#)^Q@(_OG!56C.P]b%}p#uu*&Q7 Eҕ1)#Nǒ `#:f0lҚ0 T+c@ijz_яMbzXr/or]'oL$ ո/4O(19`0CH=Zogf~g߆-U߶mUXlZ&Պԃ\A>{%6VJ`-1J ߶D ':=y:8\.P^ͦnR4vL!V :GZNjvO>{ ztܳycpTGHR i>alŴh%/%ړva=7Jm}p}7nJTb~_,(r6G3lfJ6Z:6@̥?ԬRWxC v涉XEȴZHR} i>?wO fs Vvw1,Tu͵rSϩVA=knZ+a&Ceŭ v%6Vsgdho֐H<brHg*[ػJGg@l\u=kmHݗ=,,o@FO_,(`ECr3C6Uuݫ墮(.Lõ`erӰ|7-!uUog`DYiCw+(ڲjvP/VIl6z\] eȁHߔhhkajϹ0Q6&ޒ.Z69b$0hEoee-SWReVӴ>N&5UYrR{ܳb{nuፐ t7fc x}זkŠiW$jL,҈JBPGg4ȡ>ߴ x0[F,g?KYʱiZ0߈KڦY]Elnv,[e*+[UAiR#4=JWQ[gLKYR|V쀯<7Rn*"v"럇hm`.iq/KHU,7b  mnS1 5e*3%H:r2956r_?ME&Icŧ{qSm4f2_bw6'h.?𖽽^~Iڧ0Dy '. 
{꧹FLcb{}|x2]mߙ0KJV]xvG|u=PU)voprs=$3(Lz CIh_G#\+XzztH ڟ7ѾޒE9E<QIQ@p7\Dshjᬷ򠵏9js &ż%=k촋#;BC"1;il=ŌWwx5٘p <7x 0NOΓirZ@i8ӫOa3>~25 }ѯnC{ۛ'fshGM~@z'iXLe1׫& kZ&/:Z-Ϙ5U @MwA4c9x tm1FrmDBNgJ\Utf |%R ҬBV0*QKO|Lm5fsRK5oL?~;_-fvN*e^-W?UVh`-|]/_em/o*DÓOj0ja9r+4dXr7ƈʋ93F NHΌO)  mԻNpE@-wٸMcs8l62GmPOͽV+rC):ǿJ$ .W.XF!1qCe%pf9aE !QfthQBhfOw#?#S16P)**e<5BI lT0cWqj6wLȍzD& \w$+2@?!Z20(jDA2be lDPFk*G4 n4 @c TV!KtF4O!h|z2 o٢8|]rZ8HsmPCwPMUhɐ #i=q.S7Nmjg[R+W)ADVD%OFTADFZ=~e+@(hnfW^ tRSyj%# Ψ$FI\劫9PjiٕUW{MRӼT-Ri-LζMPfmQ"рv24"a6C={fRQ&7S`ZETf3257̤N̬6[p ޲EA$ f/N| L@Nf¢GFgu:!bζ(Ǔit6p| m7DK-&Grql_ET9~qRV`z(q6Q`k!ZqB -B'76Վ\Ri~j4QܩF JϯNQNs8#3*LL(T3ԈbV$MB:<:4iL2;1bi ҥ%hzeS+:)YxhyCjH>ѬB')K9 ggxEI[p(DTwr낮74tEoJ<{"+Mz|(1.4S㆗YrE:&("kqRzmV[[&hA#, XM=h;!jgzTJ% 80Qj 밚!@R*BTC٣gCKh$UՖ, ĠӀ5zXfcNh^u?]i Qlw1nf+Ta.߁NoW<ԶI1Lm6+n)SnOZCȂ OPP;xH\x/crSE5q&i!Fw--2nCoQ5l9qGNYK&ޗ ZS W%1iB)N?i;}_dtcHfT)DuZtf5i/TNKXp )Fv,m:%u:aO&(x@(gV(RPNGHMt:AP>zN<[L% 3\V."ݴ(Jh3&d--)ңZ: @&SLjs$'z`h'&ޤ n1ɡtBof;؇^QI _WcM*<%X )/kMg܄$kع÷Ŷ_Hm{5[O&UO'vSD"tZ*ΗSgoR/LͤSB[qD ue6O) [ӛ5 &(-\C]̬Q) wͶtw~pbEnQ6(O9qô>CmxCvJ{ 2/`@T{K U7s[ܣ͔K~TL!2}p+rRm]e,2͵^VK`m#tx,p,;z[i)Sk6e{D% _Tȗ `MzQ?~qi@F؛Q#Tg%QBtDK6U;2nn$53;tIş3v$tAgi'/xgKaN&Y+],T@z!L{MQ{.vpdv&ȵEF%܏b/-Y#P_b=U7BL-"piZ7#/ɡb҃NLۘQ0s{L ti7}2 􈍽uA#L5( 9dEj4->*5C7("r !c9[iaV Mg앗f$Tʁ]xrqs?a%q8;!'ΈYq?eKu=5RdK'R 5[ vug"k{6 Qt;*])s҉ zrK^8=MB aA--sǩ:Іu*lLֈֹIo@*qbb-Vf=O37 ټq/`:[$ڜq SQ6kŨ2@;QG9#S^|W[xTJ) u qsSC Vy3N}} z NN;;HtHK@uHFO{[';y\NfjwA [ b >;ܿn,G=Nx h +Y/KTG'??x{y8v˻қ>' 7:#iSi!/~] /a8ҙn,Ua4/fյn#U+J|t*l!nK@HUjHF |BcO֯8|\҂*Yz_~{KMR/7~]MpCOG;x=Lgm6J9ax#!F'\sʭVZVGa"s&бBH`IZ:^8,#B%yHfUe,&@^3b,X.*Q: J+I!,(,3*z+TЄ* \DG%JU)f2.<kEfDG)%YYB839(<ރA<I  *:K&0F4W&Moa^}78=:|NoW4T:U1/l8-k$g9!˅dn dTV&~U@~JCOThu@Fꢹ[q+ER.5bʥTɥ\HZ#RrKк[;7gli-dNj.8+w>:o]m [+}˻_ QP狏4Sԏ@i:F癦';7ϫi9V#3G߻2@Y¸y[z>@.`dțq.3il`^ o{q8LNwF.%۽)d Zi`wk'ӿ Gw-mM_!C-,y|@qKI[]KKWYRE)k٥˦LR>D".gf3r=wsHͮm[  |$N߂JW&Vs*VLJRoɄ5=!_?;6y0;CO3%QG &];;}Pf%{5/6"-T0xQ]ɷRAd&sdD4gL\N[ϡ޿R?ޢЅXr[:b*TgApL?7Oɷ{lU#厪r={MŹ|dD1T>O|T)H1鵍;2β-s6ލ߮qth Ty'7OGSrzv Vv[^kء7c[Qh×˴{# &F'[xY 4^&\Y؈m8@E<ہ&9޿x{=F'ngt}܀ ~ckܿ\ ooA[gar6I#ÿ@ [g`i"wi3\?5Û_y'~ŏ/?xZz2lڰl?J͙hmwU6mY[6\' | 2na;.w + twk_^{ ј9!/,!uvs^n47/j)g|ﹴV8mUt{n3u?kQsⷿ2܅oӹ{ >y׹ށܸ/']ӹrܜ^n\?Vuv?{{{h8G`idO78O=?̥ww2N@m4?YG,+LrQty&BߏnGoT.]شzbJ][|3a69E8=Z>[5ДŌ~;|fՙۼj?sq?~xƎ'gZ^>%7N nPf\hAoN&$uF}e =t^r<^g =a3w6C;UAy *JFtQAWNbknM&|4s8zD׭}EZ'N;l4KnNkzv2+/߽d n%^Ouzl(zo-?{sy.6kpa2 uuFͪMmqO@7p_= O:vgtL4ZVeno7sMv׽[@&(5[۹۶/fwET߷tqTq0cat<_e 15_.9x~FKJE׽ȕg`߲%A2+ǁ8 o/>f?|3+SýMLHEN2j0bP7kJlAc#-Ȧr m-+L nS5&[]ƞb<~6prq7v}/(f4ˤ̷Z,)/X zQWdd٧I9pCѩxTlr#Ļb\Aqgr7;>'&vy?\?` #  KC޽I񕪩*&+ɅO(xi#gQ= o BXo!(A+[N,J d7Sv-amB.Y}*] O͏ _zkZk0]? lsVxJ nMܧBG[c;ϬA0Z{Ӊ}pp2 FV.=s.fT<)+R]É*m,:)QF%V<"( xPw6p8R;_m Ԟv9)l9&>0bLYfV ).m&j^g`PNƕzM;Xfb^72NJe)82iZj$3n8FX#)h 1D5k7)wo&}4vqfy^T:%I+h&jDA7֎`ƖXBfeDVČBLaðcD#"l(%U"CPBݵT'm "_;E,eD F:`!4JUIU!H" $-#Blͫt$WH M<2TB;j朮W >|x&NߝnWz~?"̅J?ةct"1mGD{;հhb+Y5'cs\*ajb.uNΣ=Zs̃?g7Ƀl|m5m1x V⓵8\b4&r=ĭMm`9}xB0 KrdMP ISxZȞ-,ƈjz64]ӋlC*bR` 1wB`4珜ava5uCwh,G%`k#H!U\]jvnAU-nV2W '{N.]eo8޳+W|sri\ܜ6IwmABە49AR-;q"ǒl Er^)d2TIzD'  |̵D2X)7 x>Ywяn6tvݎ3b8qj &?F֨K~GV'1 D)! qQY^2=\:\x`{wW\1! 9Qp%,er2v604b5x6cD)wQM&xK%kh8&u8TF+1ڒ3Cβ;Rl@a@s'j lN̯%)Xx y+YٙF/&LSґn:( \ujˆw\v:tbZ6?L@%^^Q$v]԰:í0>}*]u U;QjP(?JoCVv+1*p$x/\IfqKO4(#4 DB q *c#$HKgITE(ʹ*;PBD|ċs Juke$%">@:Nm*Pt:*%}Rt뜃?*Rφ}QIiNY :`-HNK2ĚK3^ ewӌrąUCG&. 
,/r@"ԣf({3.Lsq2:3S$h80(0vPcЅ]}&'8ӋW@K\76;A:wv8(*5yT`D@_^gE vQ43ۄ;rx饱2*0!>ueJguPݎcʉ@Z; {P Dky׎(ad?T^hӍJZnl2IIcqwh}0&!gܴp MgqLn&Ϫ!1'(0'b~2d$ǗޅεtjnLZ:M} u~ Pg<e7HOڲ^z8٨ęZ:zdf˿5V|tRZOo8G7Sv`f8sҞNf=p>\0^XhTO(*x־Jb6?"ԗZ׏XuG{J뗩֮pۖXtL/"89; p>JݿxIqk=?N) =_ˁqIH_*)4!X>G8:,v a-zA هFdf4_ڟWX>#@ϗ)mM6mLIjtR}F`t?:UOQ zzެ;unŗhL`8no9B-hw?NG /[u+69;Ѿ7&҅yCZhV7O#^6Ҏ^%^|iP<ܚWT(=ޖ[t`&C?ұn:'0Cgul*Et0.lēGCi79D$s,ӣ:Df[#8IC?=&3"/+ TӍm92*7~67Yϸg i`48~eˋ)5rدp=Ph6[Ad;:_?oVgJ[6[=R8%0"٧[B-@+<"k@ٓ# oWtUq?^Z'͙veLv:yKTgAvqMC4+tbl/vJtf5Z k&}.};#BGwXVn̪A7δYfv*x,ܾ򆹆nK0kX=ݿ߽`0 +]_ڜt?8{z?蟂1n~$.KU\ނf=m4 O~8a!3)(rLBS?aX</fZ+f=ʉK폃wCZY/| ˚)foۯ\|'3{ɸ)疈AoQ~ZxJ qTKrt>W@C?TX8Bx"92dXD'5tv[l`f~u{#F]Kհr\8rRgK"Ld8 -]xS%],]%WN2%:.E/I.Ni񊓎w(gS Ӏt$P>1b:uk2Jro -E l/{=b-n"Sl4h op~ (T2f0st?Q I 4mǎ?j%㹱۵[ WA˟N_M>>|ztz~xr;&:b z 뢡3)gwLϤ"w? Q!U$1$ }:~'Y6çGWG'_.Ivj,({\>Sby?۴a{(m-/z4ẵ6.I<& MT%y .$X0A㏍H{5އ*\O=Q䚶ҁ)w4N!>ECG?a2sou4|J|ꕻOt\2tKN9OP:q{DNu}mhBЙ8-pPSd䎕N}Cx*hj}޳R󞵿?݇Wxis %;;U+ x!M@j:{i75Eޭ{#0Wzf_YU/[r ל%}L|U(ߓSP\PUpD^݀# ~2c?%+kϦ G|2ǀڭq/r'tC3oIKܛep1eP{-\t9X' EwB񵌜0a(i@bx .!Ʉ"7Q,\DH2F H0*H\  ǣoŵ2 Vcߏc +WۉZa{%GyEh%\wJqc(crF#[qhKvCFʞS,ZE1-]L]EP2?Aӎl7sCq o0Lf򴙥xQjp1?""Z")' Wvɳ ML){PcLM$!4*`r9whm`~7Xw-PBOa,)\Nӣ?\`rɝuuGsBU96l a6ZqQ骙miuѵJnsBRIQp6 > !DDw% S4Wb2`W]^z* GC Q"")j#0GBA=()O1V [/SYN B;,ٻ޸n%WLYx< 3/ Iv)v^!E]JofHodo,v1 r4~ nE/H[ -bOqcVD >65HQ#flJ N,F .zLF9d $">qD8Y6$"3QkAjr})dG(h"H D8:EN3AH* H-Res}jͶWO/ԳmPL /E@or^NpEQɍ %Ͻ\"(z"].-蓻M=L;}pio9_A2}?ne&ȻpYH4_}V{QPq[~Z}E?,~7?snngwj?sB㗋 _B[a6&huU z䖇" V%aR޵3D%HHR"B)<`wK2mQ،d[xI c}zhD2agJXIq3E+D [fc ژN,(ѱ c^NUH@;Bni> ҂rP'@q#@(m{%l= T<5&O޲yH) u9vHPj|wRoH{fqgnnq6`LRj!G\i(/o6N't୆RaNվc{3oA~z5໣0 atT!b(Ke-Z툡qA4S(2RX: 6r֣i d$rGx4!H psÑL:"qaFV/.^>%SvW]xI6MJέrvd'+,tǟ/v㮎H3uDoi9j>^R,h>zN.>pUnw}Uj U ܴNwS]sg9zrfn'c;rVHnW+!935n=F'ee,`v.ОNÐx1CTFڊxy~R>@>1-9+-OVezkTY?;Γ05c)-b;O`\O>rFژ12'7[+pޙLaod;qĐts'm0ם!bmXh5#jh8G;BwP=|o'c=zz\p6& \A BE{؊َwA^fت=!lW[h%dZclWi8fu ;>b;%9ߝsk.Io#vJXhº| 5 դM>0M>'_8D$qrs;sraFLꡍFX.nuDŽDH(/6r2nN83}kR_/Jb!;~Yƴ2i1 JqZ2PRr٨{W=w>|Ǔ4//ޜ|WW?I*-A';'Pgm,,/w  0-lT>h ^B 7]9в}W99}}yRSux%p|^NT,YMtuo ^x[g>֎-PGSZ?/J2*Q8&X5Yx2j ɲ:rБi\P)#8VQ:H5"Ԥȃ͉{E.t3iaO{PD$is!f"*M`@ɬ+ 8jȭ ߵ+C^Iv y*|Nuk]ʭZE,(Yx!xzfWY,fD^クMȻrb ݼ={>넖R/xѳDY-;3"X oOAI/\ _~N?onN(j oaEcsV˹>ud5 rݤӗ-,jv񼶺%M1dvHv;:6:-0-jwn<(k3-,j\E-<脶Bhv]n:6%md匟^J^xKߨ0]ruC},SE4뿛\Zm?ZoN7e~{%`JA7N@ ({Gv7S0{\b /&<e<֔ly3̱kl{Y 8 דw%(RJx,s+[wqKW˱MUN1v/??L FVG,zU4oIeO]gghN>_ήסnL¢$$黙"̄HYDpGOu,+L(YSp>Q; <2jZ<b 6:-W?[15[\!l6bpx6po=%oIV鋙AȰ82!6(usz#P f{"O"SIWٲ̐#yz' ;]|fQj @"I~fjp)/D x9I8 (#s@)XLNe,kZ&қ<!Zcq2!|_;{KڝT4dv?IZo۟ٲZ8|w9^o]^_}^8a';y٩z2JW~c l=!Xq FX棐)hLg/)@PJB_ґBljQcug ؼRܛ75Y)9?3dy3b) 3wD۔!2QdJdbX ȄYv)&zV\$ÓKbrI0F1{4>ݕq5aULa˰r|.KKD G^4? 2TrԮ!e6$5FZ$j99+5jn/> ZcA*XT3KxVEJVQ?\k%'G:7sɬ&;%dlȜaZM<,ytKwCI]H[*6Zܧ6d47m[? 0x8&JoVJ G R;1* %9Jx%erУA~̜c1=vnagϐmT %OkE_V|Rdç$_w/? 
,˳1ZcNU4=jPB $Ψ@S]- ;ac>qlɕ׊ 1rƲCI[;aQduWysXn^ oKF͆ߤ%78t!.v@d+M:t\4/6hnw@[u7wFZ$zȥ5Fx5` ȕ  7YI4RilƆбt^R,B\2MdϚIqL$Y0*Y&C'x/A35&o<tTDžûUqȻjș^:m@v4c1dq1BC*xj/ R;[pIc->nONApy?m^ & m1fD ۹}d*2\1>pg[sנZH /'CѶNOT8ʀNa?{H N.6Cp`y(кJEf=aMLr<6ӓl7,^no)ЖXaU,@ QT@.2,0RP{E{4ZKz?̣y$>faHj)ǚ$J#" ,Ae2?MͤN ib͌&A'q;ע=>c^BCu&Ӛluc$t-?vfScޭ:Pʓ/w6Η;˧;bYQA!(Nښ1,)/;<^ >x߼{9&b{^z.`[=*SZ(A@i_A 6uC#|y@?m [c]~Ǽ(qg]\PeD\kڬkߏԡ,lI`]}zuW+_| gq<(A zPPz9x J0A z+~jy3+iH!-Xd>D]6ڹv锱^ j?,ʗƎѺu-=M |Gdw:Y0D;yp>Z>9QJZׁg eHOƲx)֝J3b4)ms#Uywӛ%ۿrۢA U=D\s280 ݜ{>qoC|`RȶYǻtM/ot[ۀá56UZ/761&jӗ*ڄAB CyZŀ `۔.qZz'|͕w3ۓ_ç*F'[qti<5NmBϏc|hDZ8mO Foj3km' m5(-q%#Rtվ+n1mdZ[ FhYn"kauq=P qж?4JHeև4mqz]9:sw>!CP{GM|n>fݥsnCF5ĎFz\wݒ.ûN !/bz[P$?t)A]S_-+_۝ ؼ=VΉc cӝxX9pGʎv3Gyysw0Y!ѶelDdm^5Osyϥ!MwI`4'|=9udzqKz fpx.bwM߷hwZܖ tpD1K TrSHI0' RͰT ddŒ1 3N%>Tm#79;G0oSXJCX,NPY%LTIR$J 2Cs*yb@(^K:U 벆ԂJH$VCJiБ VO?$%U,V- N2\jDqz^;p:NnK6deOC6wa dD#H  ɟEtTVj̾UgCOdVb7GRXeOG훖pB3p\:_pU6p n,Lõ{fPD( NDiR'4(JجY*22CjlTVm62苙$^/p|_f2Emh>r%,o &8WbQv';m(Ɏ~/ɯzdD׉߆-=I.KvIa!Eb0?^L0{CQ})P.97^&#?NƓñBF$Y`f<>Dbj%?x)â@#|):$.Al"tb=lp]CK A&Ć  5 ލ!oLh$o^7`N>A@S-qFQ3uk-|GiyE[_wH-Sui/ qEt 89jLdE G{4=m+zj pE+jp1|!P&4r1a{ vO.eGu1$EBSt><~= 56\u!g-řbJm?Uh^'!JlUy,6_QF2xv;2߿[$TXfFN2%ՙLqd"\Q)N`2yQ7] $]7BϠ` H@3.,x'N1(#*htIyQܥWouXwE]/w6lr|cu&%N1)O 0CrCcf,wMb3`F%ΐk"2!@ibs 3G`*3E~o^R=Z{J?2gs'gyu*w >rgp@ 7+9}ƛ[ޕU`JLcC 1ʘ8 I΁c D<)S$xuY/pǝvAK0Aވu@1CD4_}0H}9vHNDzʃOO Vdo|fC\VY O `x!ʰ*kug:):Ke2$>w__FeC?꧗/~O? 3}ONXl^ܹ,^<ų<6}:hL|grt&qOi㭥¨śQٻ6rdW?uۼ_!'zA`N M,y$97c)$eK6evR`cZWźu=韁Rؐ f4]+v&cU0FmTXWLb0VlTN~g+MgYZB JֶN2vvA~sM A$;=iR(›XG#w:W Y/S"e|e-4AN2b0{{5T%1gu4E8ʼn=ǧزdmc GTSY=HdB ;KQ;oY_5mـ\ 9Sw/Fiv3U3;? I^tTʝKYL+5]>\KyJMR"Z,yIFn5s1*g yV &zk,0 A -uD:~F@SBT"m1}/_A'XqEڢN!E u Y9Meh=ߧgP|@%%TޚVT>o*cZцMRNXe}fd^nUWw 9nIT.IrQڌ N#a )˺B#&ժdm^68xOJ(eBk rp9.*H"#-{xI)ZT矿 ܸ_BÉ#v]FdA6cT6RdRk-c`.3P 3fAf%ކcx|ֱE(x'E\0!%&D`0gT ϖ1cPq+fԅjܹ)ηaUEqnƑ{{5zjkOS79$/4,&:fj"zEU"HVHR$AW S`[KuK = e{D\+sD>Mo K #VMJDDl;!z)ZMq<'4ьaE(*-$X\xBlcVJ#d$S3TgrqMuEN%cXB7 obkԖ-K-#\'k@hunCyZ)ڙ-Y$뤡]^npaPκZp`t<'$\6,>Od5?q D@r+Jg%Шn$Aw='R-UDײBj\=,]4́]̸%ToǞw%ln328Vd 9 JuW(frT0D6yE$3ה۽L]~9MxWkWQKh[8VFngKr!c'awܙB2*}ZuӄR5~Z2L5DoHQSpHƂ \ڳPR[TD%x:%fkR1#a6MSJhT oG3\{]F4Znr!J+S٥#CNXAXnNpqŽO(?%Bo!PmLx,+7=A5$B?f~Vxs.%o4E8љ ŅBLds՗,Ndf$k▊ܸ"Cy-cޑƌu+\N.6"LV ݒ1~\jMb4n!bqvoG^PWSsm'$'k;sbٝVLmN_E %߈M$+TNMn%L\ q0x9 ?qUb+P @7~y bxN Yf [fM9;)g'4섦jSΪ|X]h F0͈^x=(Q:犩k-%G ǗYEm'TvBEm'TvU LZPft<׌‚9espr0†aJ :P|`'1ZGI =@W۟ʙF0 a:|,ġJ:ġJ:9 ,yGEV9+s" I᥅ erʜ#+x 8PN 6h+H%xJTJ'R:!ҩR,@^Q%9SXlJ(-lJ" !TUG^ԞJ4cg{o^Q_|?y߼ ]u{_ Pj*ybzF@CHj , ) +oZ++HD ń2el\bRlJuɘy +9 IɛC'Z*Ybm`, *'܃? X*^j{\AW0 _矡[*K/UAJ3EDA\q\@K\NϼCRjHB根hA0q\ tkcyQJF4A$g\E(Ey$ R4bsfcP$\&x)G %@m %)IR\Z$)*$s2=KMU q!D0cx 1P`aI/HBNh08/8=\pBZ'*B-P È8, nM%i;-Q=n ``#̏O(A2grK2ŅeS*`XrQ!C5zQj OLaۜ"Dܸ : QPX čI,)0!\`S)ӄk2v1y%!4hp,]ErsB;)s Z-`d &(y FTm$ tًNϿuCW(_ Kv䇟+owŤ~eϰTQt ȫwʇ>t< {vv׊pv5[Z~b~8OWۋ^?yP. `@'҄f\u쟰D_SqC\>""o8HQ Rm&mI焑cx'ƝI 6hg|/T/~'C?M89/wO7MPpzۻ8'_/ao5y] ,hu(|\|ڙ8zJW:S%aA'=0Kf| /!y$ \+LW WfgՎ 'xE>ppC#n9WhS Qh/O-l =/ VN-UMѨםZXZxj '>P޽vf.&  )1z~[-z^ iٖuP׺WCqh!yv) { <2ԁ!uMo|-% iQ Sv=t+5{JYK!*tsqr-I27gɁpTr(7P@y>l@}?k0܇ΏrοM~&ȁP 4`vfFj@F^!cEiOwq .\iwJJJ<$&a.ǞP5kԄrd:]ҭ;_^ޙ_jqhCkFOF?ux1~zfRqew;. 
Wcʙy?m~ϰno|ƍ87;^}o^V.g3X9 469x{C?'\s~v_9)ro_}=6pFshξ⊆uSـvD>1I54~8hпrw|@1<=[Дù?;FO9+WsM3{޿~PzSF7+ ԭC\?Cx<껙p}[[ޛ|g~9ҿ6Ф|8.?QڿG>L PǪq97뎻mVt܁fGa3w ^u_=.G00\e7(2)\970|5^糇yɝ7|UɁ}aT>_}bMK_x(h<фϟvaz#?OP&]@ /O"k=|/hgOظX?{WGj2xٱX`3~X7 IjTbJR*KŬ<*SJѾRcy0[~Yk[^(~'?s h:$Mba C(˲z,}{҅bB3q h.5"FRUD/gvo<)j:OЁaڹC?&=S9RAC27*-rMdh:,YT /SAoXq ݻKܶb;sƨ!d=u rI]}:Jc|@y}Bi5Bs ;<tΏW{-j(#thkҼ;K5 חߔɇ2/$7Fu FMH_cLJ~mq2u8qcÇ~ӈ#K@>xB/$ζ'Ѧ&=ͬF]'`$vZ}?JiOy$C2㊢ۓ?a_Q}ߕ֗x߸8zz < sm&vsmQ O{˪MK,}E!ʖ++YhPeAcx,'oaZa'S \uv˲F9Y)C!{z+'BV60.p҂ QG[o4pJJ[<1Ƞ4 -LeNSq8;~TX\ފt%$ː߈khk+ \ix=6vy4 B2ʡL4tJ)Ap@ҡZ%IJM0-"_L ˈg)$M> r +*7~!p'<="ЧFfj9ЁZ(HhpVP5e@UTF)6]Kg%psZz}ܢEpgc;ߪh yBi&L 0HT1eQW`;6;A@ Q,2yjf < Cxܫ<8J#Rp+B$U̷ &$->@rGx:)5N+.2y@k55NkeOI 9%酽z s?+3QKi0r=-S=˛[4.5XVZ"N9ꄂi<_7㧄ÏNQTn=8A夙 <[7(#$!s pK$bIpJKZWݾ~-B1ۗ`yǷQG_Z F{ -.B }+qGx.|(XCp{{2EI1SR:n|Ѕ91I;2-CG*Fct u)n۰vDUlÈ:ҧPġK A-):ҧPTE2D(tB; T5+e0Un8RCQv\w.UԵÈ:U)]yUKҖ}?UƘ5=`|JJ_5M]萫h Ƶ+؅fbu9#pՌmaddƑpс2V-aσ_WM`R|Ty5W5i{ApRѽQ7߽In:}):+ۆ\S⻕4}Viœadnva++9_m2 587sۣiڰHw9˄雫s?.VCO<s-ZQQ} ,)v㞃 9Zb?̑2f8jNRY4վa$1ÑBc^<\^|k$:y2Ñquw[nK1'Oq퓚S#(x{nJdY=nܐZ7nz8GJz4aTT h;ce'5Nd ϱB[ T wǗ͖bBNu<ج}n YރA"? xDx}k9sk&ì. T: mJo ԺHgemT: 7 )Bd+Q]Fd-܆B?]ZLr7b>`Tzvs;|-kO`l}ˎ~ޟ.u OY=#},(J8N`A#cٷB [ɗZAN[-A>vDBteuqO/o޷,AidwV[#i55ZԹgOeFl m$dWwq˧ ZNW9f] A>`°>VaqVӽ>_.k\(b`AyCDZ8E"bs{N&e]pu\C*'zL3 :jYpEaPQGN]Ik ^I$--eiiZOnYp@֚̌|- UA%3Vh8ZP EM%js|o˜$8hxbג0S[p8^s;h4~ |N" 7gK89W ׋Mޞr[0'YKrQZ tl'&ڴHb2mndJQ[ӭoP,ǒٔ#,k#{.-F3,4!vƝ'h+yS|tzEg d]K9*еV\ΠKXD^L`da8%F:~0'(M}v'gC=+ll^8 a obK3N 16Wߋ$6j4{rw`IoWMtGw(ks*\9Ψ>nוuG5ÿ05^rjFu@yiFS9'](ج<F=Ndr<^}Wx)8^bΩ]wo60a$_+Yikv>o2#MQT5=zmoJBi[| \^iܮ"\.wi J]ωѿvQM@ōG-Y4< ]'amׁBjbwi=F2KGY+R?*i R ѯحk~2ZK6d/2ZfC_M*¯@ &0!6V.EYPk"x3n-& ZA, s Yi(綣i4c.IG#ib hψ|)ma$8MH/ڼu+쫹NMO1O7 y3UnX抝".9 IZgg(EYKbHJEioQRRӶڪ#ynTKvvhl$0W $(;jãkVщ$9xTwiŏ%_WB`1B \}\C-a^t%h9V'!Ɖ<KI)ːIyT d:e@w ǿ3*~Ɠ9_]g0qAfjr(1h2@1(+V4 @ֳz?sϥ7Ya8gnD .q"@_`\1b(daY+aa@l :e=혥XAhlC]5?n t@6c&' lԘz~ЕO}gm,9q`&eK҉f7o毓He*5%-[Be H~]A+59ijO>$rPKڝz1V^͎kI+<3 !ڹ`@Kc5I71-s~ug蕃U*Z4.ܻRjkJގswO*܂0 Q\Ҷjϑ=PkU '(̛ cW),Ќ3ѕSZS]~^)SM;]Kp⥓f γD{5{p}BT?e5^΀Φ3;c˧*Y,vf]/mhi1Pyc&棟}7ל2(?,![47q҈zfOg>϶EpbF\!:b2aq~|#R:duR7kb`-6RZÌ|/?oo筀xDoff00=u? _'_`*O~r)GTx[:6'!jvw]RAӁ47aDA,2E9 (R.̈́Α͕i= ^39SDXL[֥qxIE)Pq݊w("%I%@`:lOҬɑ= 8;Z7C@/ U[N[7>N4cjYo9~$Zh!(V1&Uq(^8?n&9!+78?J(b4Ҭ(Mjhq GQۚIܓE5k׬车 W߫@M*S /1,ˊ1F'15Je nt,ft&UB2ԫƍ^W_:l"S7_3×WC, $D+wH $)12:ގ{. 
6$7Z=*Tdx%ybݜMT5(:T$י%ӊydd(ikZ.X0*)ڴF8L6!lܘ{chSP.sB&[,tyO_I`[Mp [ZG kP%?RJZj*YA2lIdSVDv97ܓ@ BңF7DN~ VjI;+w4@ߗ #]&X;,M1Ui_'[%&H/0Aַoӡ[ʢʢʢʊj(e+*k[y8As"c@M|@[``4AXY3j v&NT ؎e}mQ%˸T&99'iC-3 +Vt1^Qx"_*ŋQ&xZu@"8TK]髗 r<{]c˝ -/!l#~|srlkwtdqP¹zx!jF"%YPg0A#0 9 κ*Ʃ&/OFng^0CRho92=/9U Ky)8cx!xKTS4t9*U G`~+fU&IC9e$W=njNP.Ž "=5kᰖ"*!G1dɯ>q4`gzi%Fqb `IhPl0ףRh)xSkX2$ # r:YNP=Z -B//$i V[= uME3 Į.T= A <D +OE<ĩ h;ȷ[[HU@6c gtb3A/0n D`d~M.(^Ϛu B)fqw աiW[2Zv9eIoːk:y" "S<*]3ֹInkI\C O:  u%AX[>l-Aya}.Mh-\uN ހ1}aaxӨ 5r_Ht(C@GyY,d`2Vh{q5ej}Lu5!_sf,ib,-We=h,Ŗ]Aeo BItz:I2GoDxi[vm¨H&E\:ئ{Y8\fVd<}bUrhDYKBwެshSIncm$ |Ic'_j&;{* 1o yʄ6bRBH7J# w$l  fSJnvl×}K?~D\N}ZkoTĸgYI\, vWX QN*2A5$iFPOGk'an c~3bN&ʹ@M D }iSDW6Or6Syq 1CK珽D^uM?5&ɣ޵+bC߳{Dx5$-6m$b.Tc{-'mO)V|eb5% gFOm1{Gظ ?OeMĹص5#?’`] Dc)-I~5ܰ]#,X 1 )]lbαDbW?É]8{'#ҲKV-\@ԞحXllc» dXUeTTSjk r22,$ %7o=2x7r#S7VqsmvT ^)5G$a@+a7Q X`ƚA+ềxn`3l[ O򙔊Kʁ`%[%v8q8\ađo!iq0yok/54Έ4> 6FJw-0^/r%\ 1*^5CNӭAM+@Cbܢ6[='&0YCYI{QzF:$TX$C`EeMa &0*3re;ؒ=bc0`WY!>]p gs\3&+ԗptܭ''5Nzp3|L.?ڗ4rsezAqѡ"Hatx.KD':$NÃH,qIv`nn;mӓׯ޶N"(mֻGi{{Ύ^?N.2i&aޣoڧ_>woG\qf hxTQk|tJ׍Oûp4vFZ,u s珺Nxx|ޜx,&~> zyECgVt#0ϝQ5;έ N&WO `](~j9f1#,zS 3$Jbnl$r(}G~kIȜ8ASa^7uwaϼ 4難Tm) >"; GfKL13'p\Hzl9?6׭o|{vzs> `U~iػr6Dq{ ~l֯cY*vQ׿@ Lm]>q9$"l`6N~_]5Ҩz}g1Ct=VZ*_nJ E$ 7~BjI7v#IX+ja%0̡2$è)(LM]ˆkՊ0v@x9basìy mK'ɊӬGhM} -w]70nF-;-c:&D= M>li}}]w]4:D{" xc"mx xঀ@v\\x.0pU\~Nh' b\" ^_BQN;cH?2&w]1E.ٻ<.lɉ˓wF]]uy+.]LŖf"1><״mg8*=pG Z34SHiTa90(X tI}` `iijן1wϏ?(:wGQDLttCLA`݇ၟ'<N|I=xAszTx@oeKaqЦNq-guS$ "r1aéM]4DĻhҬ㹭&SnzX|sm,ssWJ\mcѪBʅM`>d߄%$\&,HmHZ.ȱ]f`vȳ,|4Jd9-,5!ae$cm y~ǚ l3 (ь]'Oe[ep&q}j r5۶P좊%WQt w&mݿ趉#a]Gd0EQLŕI\j۝YYAa֌\(ȒJQh**c W?'_Gjr1xN̶G)rk  <itM6EN:wil*I%gkmTh\^[h̴ph p8$p}e)fSP|}dr-v=|m[Ҷwu\Yw(WH[msB9u;br Dq2m+u17SbzJ:**~PsTҰ!Yu.z\?.tWDQ&_0yw=Gg7\<Ԗw>Γ: ;o#l8%|da^*.Dl t:R)o}-Af9]j9gbmcC2W5y@f)AWV7wnZXݥ0۞.Z- P3ļXcE@ÆHx6GA+/aSSf!V*Evm5l+K!s^-%1P `ؿK&Ő;R {>\;Bk2 J5JJOq7~`XԠCTIZ\7Fe2j/&jwB6u ޸u؞7w@p膽C׉:mjM9]sSun"prbD/c} y[ДJQx4uBѺ|ήtŻRfx5^k0ծz4<X2BTj}쵟j|xMa_^0!Y %hf(ɟ}ہ}pFWDބJ| )\F0 qGh83{01<0% eG;oS8vWIi(׍]]3ozt=wCGkzwUcR >"; G_bb3pܰB!χ#'a@/nqqkG֯VmwoZUtv'?@N4z52zC9JS@;=v!s#8Hg`u%N`5Ӧѥ`j[hݻDKy/_& ( )!}LHڙ$nCl'mjOf$ɠ{3C(㦭$j(p O_A V˩qۆ9,R;]W/O f!iqEBmS8* &fuݰfu8m3uFu}[̩ӛ9[ls^t;jr'~}@Fcқ>hlCyZG0ƾv.ؓ 6іFd5'~߃qAl,o\__]?qǖ';]|3sZgcWD 'ٟvy*l.yxԜ9Y޵6r#E^k&kp`rc'۱d$߯(r[jIlF HWbwkyFT R nnrH+Cu"7CG ڮOI}JPtѶEmEGj܎O0|76Wiġx&xw@<;@meѹx.^g(Q{q+LFO[q#=f=xz vfUT !ס-֩t:mw#z; 8QoV8(&ֿ> tu5.88ö ]42%©JKi!tʤ&AaFRP~iU?uBEf)a;2Y▂Ltv|^rq6 r ӘscNc~1f@s[hEq|Є'RBؐ%RMD6y?,п#ny7$]݄WM>$_0"p)M9A♓e=t ~|ZCڤd)¯S%TOK6+nIE@ e.|ү7w 8 VVByR yeʦq[뉠Fdӎ5q{h$.̓]^.)W׷cNْu)s&%,ne*A,IDR=-(*FF<_ee#¡Y [~D (Å uQ5莪ڕX[&TNd$&h&qB7A.7,P3:Z jpij9Uhm MLY80\kBqRnu+o6i)ZL3Me~ Q`Jc,%(&<6\*6(FV~%o{$iT ,hQAgx& }RMiCסnb7aB:mF+N*7NgS)E y;@s76򆾒n6MBۙ6fQڌrhK3 EK/yc:.- 7,ÓBFR {Id$)hx[jV2öjm^^F]; 5Q`/+0m/͈Cz,=-'gIz<%p?Wz2wJ2dJ{'Qݼ\v;ÁDizZY mI ibFKrFuQ{ b#i||{ ТpN0ythByTyjfbL T Q@yurSr։d"VN8:Rf'ҋN)c^dnQygy6:`K>=^ Jr,b /rD g":Evi6g'(.4YŝVCDZB!PF@^Ϳ =M|߾$~ _z"^aprSɢKnfG<9*z^c~.I6 mW_-Gp^ !M٠,{\0Es iƱM洠F}obP}]tFPWir!78R 8l@\t|J*FEBrib!ZcnӜHJPŠQz9P)",;c2\89jMewF3Z BM6NPpĔl exQGKyyXmy9jgˆonV0e^W+iC~rET2x[W[DOg"n]EIʑX{Nϰrp{?Ş`66hLpt?7)^;pFϠ($0`e`p'P,Y>vdL1 IHxfTF| 8e-Ok+̤o3Z)a-^xۇ; tθ?mxX4j{X;+(YuinC'ž[fŖ95;akYxCfw?㧈0]}ձG _?{{+\0eG6 |q8Jf~*ͫNܮHElQ/x4JpJ+ЄWE;S5>9ְ ZA$-Ffң1ySg$|=b=wZ|,N8'(Z;?8nga5f絳>f>kndF Lw*^IsO]ړ lkBUGBv0V;mt2fHC.XyӟѮI>ϫgboQf0TuGC *q |T/_c7%R~V`}NN9k+g g_n˾{ў]k7M DnKkN(]Cd4SwYW7?؉?0yY)&ŒXQkk2&Jo+)TTB -^̓m+`F~F^@zMXT[đ]'N/&# o׳*g}2ecS8mMŴoW}=O-zpd3NA\q$2 ;;WmB&h Z ovnrd M9.sow.=Ksg ?{a7M2ξ*(Sa?`V{(UB2pADI5%(>N2}8ٌWaJ@HڭCzu3Gk; 3j՟*Y;?@[<2]qx%, E_ F Jjsmn 6W\oy舩•O/Bi[S].|G}^đ-8Uq" 
T[.̩jrIE<^٨7|)bIpDk)?70gwjٷ(gQǐ% [+:R5pCñI}c.BӔ)fmfY<ﺥ U;f:ߎMT}+="(*T}k#b>{|p B _@D=wVF`RrhO4ʼnRXy!UJXŜ%N@S{j)ÏSx'fcڑllF 9'z5bHC0ёdL8aȍҔ W`@ \' 饥=DQ% - ka&`h%淳jjQlE* Ъǥwwm}MAV`.q&;Eh.Z5-8{#8P?Ypw~%9Ep퓜I]E 24wiM 64d\3CŢKe˛w![Fvn3^^"*F@8Utw):+I,#0 Y=ȲF;r<ReK\aMFq[7ztBD(4hK$m:p)v= BEzpѝ=ޯOǗַ")+AZͭ [Vly[w@%'d)晗 i&Ozp qm#ƖKۙ+ẚ1UB '1>uezcMӾi`(8G 9Q-}C}h?~i"|I3[/ ,.?nnFtç1 \d9߮ىjb+G82Y]LZh@CkVNc9pLm[V3:[:~/͡㯶Mf!6ww/cހi/[W 9ld<6~{3 K6|/{@zكh¾(W o '[ń0#< ^2?]_}-& js-xbQ,%}w錚)B1Ost] <\@OyYiߓa^44@$cX᧶+O5Ч2(G'-FtbZ/O{W JR #D2Czw(G"sN8O{C` l޾rM}]{%dzLe~v<Q~0%9.ј[獌F<B2]` 8*0+bJ(%\  hFbsC B 1*2ϢmlMn4{䟭Uw~./G9͘BĹI#yUX :6?{WǑ vPr^!Oe1cDG̦('yb H]_fR{r=R˕g 4\D4QшH锪_Llq%L80BNh'>b!轏?NzeA2 u-99Ҟ9S1qF R@ӲIdF"shpJЊs1W^F$_z@zhib(.Q4P &N#c8HcB1`pIIJ)%Z0Vm\8'Rb %%O[h5RjXVR2bQ幐Ɣo9 sLDo8$ӍaVRT&7֡ L=n9лt JqM'ZN u yHI:遒RL􆦵y獊D(Ŧ,:kbӦ4MԊGZ} jZ=EZ8$^!'@%#pWFrSq*M=lV@e\|L$]k'['h,A<'J]pD6 g3mͭ=O>_>]ĬNtk8ѨIƂ ח.VdUYebb%ʐhdRDrצ"5-ِ'#ʛ8dscz0W*HέazDGЇ "}[v#D }Ǹi޻c_KgKW j6_6WߧI/ai];?}>xKo<}GHj???]#~J:=!w0YZ%LjN~O]=J_^\_%/,a 룒-#*}> o\J~Ow'6'?Z 8=! a6Κim848j' %gI f顠8Kn6&e d~(=$f(L^3饹2g[9yȿE.IcKzۣa]V?[/Z1]4\ve|8t4O!=U!WY-(j&%._&{-}:2Im@;0ek=ݭg?9պ;DބSzll;jɟO.o HN{U6Tœ-dsMv~ծ><~cРJL`r(FLFhw16!/5/Os 9~?+ElyǴAB:H{)gIRyKpjkeDD'0*6^'3El(Ǻ/ 's쐟29L?-yvo/OmwZP6Dj%^ gݟmf,k?[Ƌ{׊\>j nz2͠sAKDJ8n~ml|kNt "΁ꅷBy.}pJshlCWڪ.o>)lE79LpGR_XިD 1rۡϒlC[YO9 !3M%:#aU !TBBXDCIXw/z `"% Kͱڍƾ=fC-A1'F i*AӁX9lO4cnFs 2@P v{;6<"6(e;ET$}FqD27[``c1_<_o7;1|FYFu91s詐\i3"Du-~8VK9ۼ)Y>'z@St_9Z-"FvM 9?B+}ZZu2 usòm}/cRF8oZbxpW{^ jheI'J3,&߬JY;e_܈RfYxs7FNo?vǏלWU1j[')M\}¢f68#Oj<5L (9j=Z ZZEamI#}iG{wql %{ ]m7k3W H7@(T[;%6$;"Ia1X<+i"2 -e\hPmdN^͵^K 聦kAmK>;TDD'훠(cBq ↏PS] ;0hi&fDbL$#I4eVd2- f#xU$OF#7E`F<ܗ' ʕׂ+\9csLqim>yRBBovo{6O^~O\>ud//s$JN眕T+i+&+ZN9;Pu8ЫI94O(H]zh[Nj!&>EҢUJ!4,kJl ߟn.H?~G]O*]Zi-˭ 3*;W7˩mVڷ|"xz9*&x+p BVLx"C@TmXKPdդˆ:\[ş "|cq}8/!Kt|%LV7R*`r("cR읰3rH9uqw}Y%^"BIVO$cn\F6\/pL > R I up /N @8 }׽!@_N!.wM>bQC* A7 RPtl񒂽z~xv]* @wTr`s߱Gsi )U{tA`F RhY.WQ9n*͙"0Rɀ- V[ޱţP:K!8>~A Q=]6IER┍5vZm2cLgQg %Nr;ZjvbD`HM>x 7eJAEΑ'͞d3{0I4G צ,O/R,cD\ߊƪy/TtQ!GZUN|Νl řL"|Ù& vcJ(Q}8-P1Uo^~׹BS2ۤk|GB4ҀFCYnǍ#>`S6B\s pLVJ'?QQ!@)9?_S.k]^k)qI ZZIne0QOֳr?f~q(|aqbbT秱y,i |?>ݞ_WyVga>pGP# AӞp<(4$N|Z?|Mh+MI}8w5oK-yr9^/ O[BpG}"}o[sq¦Ч(pJ.(lL\_|Os}ipDEDK{ i h+x$e^IϢ\\ŵ}ާmemsz_-.*[1VE N*-aDEG @DK&*|sn  rt&;As֠GcKHM3Z ~caGOp7s!: dv`6AöT:65rAi f+jcq,f M)N6/Aa>K^C)#wAr ۶ͮGST({ÜڡM+xP9o1T 9-kD#++'Ij$o/y$y?+Ƶ;MHԠ)W ]Z?jJEu I '06߂vMj4NYm][;*lԊʭ8܊ʭ8z+jmWuj:J \5踨ˣ\+eM I(ynãヨPkZ,[Bqar}vntlAzin~0D dLMJVFM-Bv۵uWl% [" eXA8hjG'!u|d:B59 [)ӗa xB2{1"O:Cհŗ?Nʖw©:Tѩ3?Q\۷xW%֜=ͽXЭa .\yx~ -݅^6{*[j[&nq]GC2|or*;?/k{eָp^sꝕB*)P^IdAZڣWZ? oZ3K"*dZGiVQcx_K9-$j lk 0.VI]tl텇d)x:>+Um,(/(畹.0'QThY16r]jm|G1 ;PHR3Y%w#2:3GSufIټm.V9sԳ9@ٸoRZ %h 5Cs-.CzMmWZc$$Gf܋햳r8B'OUng󗽿7ր{T7L{T7U5 h[ӂS>_>Z=i+xqHVF)ˮ0La"9:AR`/S; ց٫< I=լγsw-˩X-<˹G)˻]ug7JCeh*ᖢ3fp_#@=7DU.KWĸ[m\'諃!)̎BKo Lt)*0[=V,:컬Yw޻dcd zK9JO~Of+pZi|iXGm$S2mS"jIS{FLY "!lE [$p4i:69#/s$HFA*cs ^b1SK7&z Xp Ju"{mSEJ=)XcQM=@4Y2푥2D `FG۰B팾'{?FS^&pۓǻ,( }z3ZpMAl$= x93 8QcFk($B`01kǀrgh$ijfIPGUV(ic:6TlΧZ"T){~,M|Eb?,ݍwAv` G#c%" V2 :Cc$9(8S%uMU3;wӛ痎:釮~ :ZhtjNKaqE)7j4 F.|0[s#p顁yj(BZSƅpEyT> YbthO(a+ *P)8uP(G{jڀ `jNY#*D(g8cCd#/IjuhQŹsuB]jpԮy<͟y`G!|G8^&h&j#/+тqDXI$ GDT<*t-CWU?6y?UL0;*qᅺ]}~,HgkdsDAEb ? 
(c Zy E_EB-ǒ t2:L"M$èrl1,C00q 3d]RJRWr0h7⧿+6\#VԔ߿%o߯\0/oTCzO@oJzDW_{q4/\,g÷(!/NNfr2h.P)?1t"k7MN.1zW >@煫": 9jBU8.P)DҠSn @ǕX>Ò< \5DyN'U ɑ~ɰt yiC盷{[{+ $#{t쵏ovɔKr1c^Q}05 S05 v( C*G1sB<5BQQSw*mN`uvR5_j|x#+e=S SickLbRcP@DZȝS>L:(#:˶cnќغ)bq@IuSǰk[whG$JGxd"I+Ga)ɥ&F1fDTMrс3;EXGg!hӦDR5C$8FHV&pT 33 E2LIkAdfPcxzSpdS?{)\kV63nvV!__Ssv؃o6&sc Rێ zf/K&3Vzna^~1/x*9J2n(Wɢr]I~Nw`խ'nrВELUi\G נݚb#:MQG= L5z_ٕ}*V^4A 1ٗU)g|XW F!ά~*ԗT:b^D ~*߯u6u,_]- ѣ"&f\V7!źni#אV)YhL-?8Sͦc_ZN@m,N%,&j~7) q ?ha<眞`/ɟ'd2 $ω#qdwS"Ş#xJbcTS ȩω !NamyM5gxq;Ρ?{h;곮QO=LNxb'Ϳ] rr>ع\]9Y0ݣLjS ͈Smf0B7}LfTzfb-fp"*5u6ZJ{;X75 0ZNjT Zfw}Cwy gFҿ'2ӕ$.%Ϫ}jx"*D!"hQ`RƂjp,=S91{ ϻOH`%Qu2pX D|삣sʐ N >0|Z2k^hL\k:dB!zrsAfv$˜F ?ʑTHњu7pƈh ;JUzsB l9&<}dClJi-;(ߓ!Q|wbYрI(uk 2%UT׳zsllt_ -8٨5CΛ N?aBa )DVC)HYϬ$x#"CXE#aGa i͆j6ewd ,䃔jAHvICj<й٣v:s:)1ʹL]5 !Zzʦ3m'Z .iWȮErY#LPL(x ˗`Ƌۚk\ ԩxERX =3),ոSJs|>Tp..C)O4Yigc? 뗳IU*tT tԽQ8Q.o^%YLJy#uR2uLЃnjNdNH4b z1ia='ؤ%6n\)|"ZDA1J@E!o "RZ%! o8'`=Щ u`^wP+GZ!\qd̟aXL(@H #`T: iXLgI'%`H+f,7P<|< `(<>>~{nL\̔+Y_թ3WݮϣZ{6yB(qr*Y=J3C{k͑q$8}0F"X^J)i qgEsG1&1r{mF 9|@)FeP$cUHGE),isY+TZ3y3}F,e/!Xo#.?yf\^Bo4/K?lI~"w?sqa:~cci@rb8LSId=K?C9D%E\$S|Hތ.MJb{*_1̗j0pߛ.wK'yXF 0qZIp4`9w<HxG1k+I$xEUvV~S{C"CH69P2Wx0&RSsE܊p9 JO8x.\j}QrK~8~kVheEس;bkA Pz΁4Ѧ~q #9rCy/ t ]zY#w2uF2%h[@F6RCI8^zS> yKKR@{mAXkB~\63m]YHWhxxcX>o6!/5%Eyм'-*;afFWv) u.sWԪt[*?o@&N. ?]8pJo\len.%'MRy}+K=*--yk-B.nݤ[Pőgl ϸGB) ͊Y(lVJl9 9xw Y cskV\v8Cns=xkZ|XDZB]!)&=#FeeGml_ SN"ͯd3gdW@x:9]ߦ0[=VUalw@rY=L8I8868a)A^oߍ#$*YզQ>*-A)nөխC; p`\&N-r CbA/--8E4pJi#'&P\t(p@8L8[rz]0 ˁ*Yb+cKBV@&Rl5M453 S$q p= ȣy?|S+H V <_b1DȬ @Y8Miؔ*ffmJ/ WLjYu߮r4ruen>W[ ,ݔgF?c1CG9L18E%~q}1ug?o eNeUVWYDws/@Y_E^҈uE0w7<}Ap՚?>_)wy=@;FpBa`Ow<%X x>JH@i6GShʁt7P^Oe( Ӑ´X=ej%<C)?ɖgY0àNOUd{Pg'7Uvnp^~h;WmyO~z}l;#E?rp4N9ȯ^W&vEž)0I9(=\f4%Y>dq{R`-yrBbK>4Ijy* "EBan\y˨BϪղ|0sebonӨ.]c 1;Pog쏗 ,nީh_wx>/d=M =~-lVeNikQAg{쫻`w݇Ojzw'/B5JENG7bl/ )6oj=YGH| !A ! ].-BjvWx:^ ņQq//vZ:XϭCY7k+bgZ`Y +Ϥ,_?; LRz2w&'YJ[ukLw玬.n+_@)gU!ϜowZ%28U[PpIkCjc0l1ܫcA rJrSe1E  d|Z[Zwqm<מ@!8aSF>z-~}cnoFq&b vvqb3k-Fp͌!VQT[Lz}y`wu : 5F(TRKb+e&mE %1eT"SD8Jq<M034i#$"!5+`d #J-i~p\#nt *2IO -gbQS+KvNPh&o`l/^rm4 .1G.nLG=.xZvr[m,U-Pf/UkjEj(ޯԈ3b7U mb(6T#}E/U8UJ qf ~ ɘQlFLk),xD9S(-xbyy*<`ҌHGJf/J}NI'λHxqq!e{\ZfًmAh{4T G 枂h{b1p! x` ` V&7N(Hi3J/AQA 5YNØ4 tj q$ݖ@H$I!IJIQ8 gǖQmO V8dsHQd4IT%uv^ |Q18b'0ʅI9]-yȥ1KC@Kc^A۶SKg?إb.I # b[E4n==dw?˖g;MvC{룖ku,W|kQ2EU+'J,at.>ݕG= Z6[C?YC/5kQJ.U_og_lRZYhldggypChnF%bmT֭u?z%B:ݮiހȂؤz{*೚k mdQ+% fd`)BRBWCվ^>x萝,){mF/G1) :i JxsY=QAفa6NqA,6 |d@aU]?aJ!JT"4MMTUV'VGO9ˆԳWrTA ֳW_TA1RzUBAPJpB#N GDES7!ptV} ˀ@'%5z9F-nē۫-Exw~MEuOzb[TV9ugTʣ?()[ ިp*%٥88%i[3mBs]@Z>o^h*?UyжঽjU n@hB9"p݁m9{'9 y*VCto'/3 Mvx~;g~S[ =E#0s{**{Zo޷w>{oz%DӍNW7d וAƲ̊:[XVnt;^;{Յ#s j7Aߞ]wh`G j:pzpɔA-84>niZ+}Wh0KӢWk`bcL e;(4Rac\?{ů@mz ,m#@-sZ=:-Mޝ&&h~M1W]?xj̏ V~mi^Jyxxl̆.+QMH3f>Znk {pV9(O[iZ}:KvB>cޱcލWHeaw+](yr޼hz,Yfcc?_{쇣3s8`8 T d -I[*P˯n]cx}BY*"/QX(8B.2'׋]S/n }0FZL̋uG16Ӥ dW@1YX+&;ʿAh1{n%cX_K{lz\0L6Pv' fj$H&"d#0Ei Bn41D!FH#QR#%Z ҢgN2}d0Է\sH7;1f Sǿr0R?]1w~+XM#;C 6S[J UxaHڏ%$Xj2ˋ2똈bD"^Sqf?EB4=gpϔВqthVԆhp=#.iߢ/NWLǥj  UߡEFccwe-D~bә_W#EVWխNc=y ew˕a009almeHcF E8NKT \fd{|Ì,7onNm4jDRB0Y~w˳7g*)"Q1wdf8D`~8۝uWgq.]}r%9La5F{$VDivJ&B$8e4PLjESD sx)q!3TP%&bJHE*Bc\yAޝpNb3} PDB).H p+wET 0>TI<ѿ|AtI'<M)4׊ R&1$n6)I472v9DJ%uW8NrcR滉04Iݝj E o\{q>fDXR2fmlxZG Q$:1i/.Z#)Zp'_Ik,b\I@1CSq2rƱt҉6/@ksDvzuKڄR9b""̭OQ'N9@)Cy*8.Ә5VHSk6qJ~FUe4E-@2u-ѴtM8@dG/leiFReBĉH EI$ 6)4ʽkDa0N(cN$FTpZ ( |LUt9,գ>א}2w?Ndg-Kv=_7=.h !3뻳Q:+?O&Q~HI,ݼ[Ql49wmI_!őwW0A wA OyH/I9oP"䐚ߋ$~k,>>ôU@GB_W/`h7],HjdϋEM(Agl<\,6  . 
)\,S*br~;~ oՠcTK|Xs"백joﻗS|WO ﴰǟ~}-*eX[?YQ^ؚ2x萘F!}-08>-.~s3*ֻ͓o|s8l FM^MB .5u'>Ֆ "Fjn(Ƣ&bpzN/ /Chgdo1_|X=;OtQ_߾yW~ss.nӷY~'/0dx{)ЉMt,g+t~ϱ޷LL g#w:~fZC\mAJ1##́'In7y8>R[xsSc_Jb:{{k'A-J*FZ%I JI( zxYO}17YF.f_몎 a .2~3RrS4>r^dQI:9fEV "AURdkqUs%d~k8ιsiH"5STUwP`j-leDi">I!1P'@ZY@KЄ3 &Z5J*XP:h&4h]J&PٗK4bN$3r Xv)TWÚPNQ+4ϯ39(O'.Y ͇).h ('v~Hp)xN0K-WJV *rWi 1D [lR]]%]B:Jmt4_f! DK9E%swA |JZF^KJ A#8 SJB UN03ldX[VIzS'lC3yE?SI YLO ,`RP{k 9m9*f8Ŭu,/S=GIˡ+"\ ᲎- qѮ NV1F`Ԡ?/PSI!BFNJZ^,a!9jx.戗_z&*o3"OJGwj;늡\ Jя'M-jrdcꫤIMUڱ< t=n Lyf  FT2N Mv1wܑ*&RuFUn5 rdI!GܘC>7 ԾhdMDҫ~-FOjٜOfB}0<@6 LeidRtхu}IQ6e'P AeHiO;t*'Zs%G*3 S'єrXRi|E(N y$-U ʨhv%)NM}Q>ɳfnGȯ߰_!!m P%;0w7VNv A7WN{Ym"Z/VPirEP^mBJ_&%M\J+gDyل#Ie&0$B p|\ ʒFGVp$ 脠I[ ɑu[/E\+wmB@G0 DAEW;D3yF60hS}F1R҂ 4]PQsQq& hT2PK3D0|DA4ts|Q|Gy<"VУF+Q*!@!]klei ҃9ŷŮNzދ#TE>89-)3C\&ib;z 'vTMԄtwY9~}:Q#nCtOa e:Ҭ&W;.9tb$ki1rv.F .hR+ =S:+=' 2#5|"`GMU±bP9I:@5GO8eHE}"akޫrgH?ؙ[5o;_Y-wC~xm|a6;\>Phtwܣe!KdoNc1@c˜JTȀ[zpyʳ^Dl7ܾuT#5ŀ#hQskU |W#W3yG # 15 }D^"S 5LĔ$)l&5"Fha@{\vZpj II=(yg(6( %%X¸?`: )NSdp EXψ6Q^U,I@*caDlx[fUBB0z"=8ACe{R?[ޯP#uhxR7pZN;V( ب%Lkmyp˗ {1$.c;U ʶYBֳ\鳠Bs_P>Z瞻TM5zO٭316׶SU2&IV'LA,S~ud;: ]RU$*vu΁14_g#(Q7ٖyqX&*FR($ UF"fԺ1vݻ D!1"c^3#Rr8C$"r A$DoqasP+1R܇tO띜TVԨGi-ǝaOתo@X!\Otk*ï.NmA⿈UF);0aH0-G6{Ȱ͟XvYUj~4c]1iٝ=9t{K~9<8_MAgk1SC^3jt G#űj\,h!=T3&6Dׂ7b;i1羴|/ȉ` ;0nkS D fL%40 D|qC!e[qC\=]dDӞ '8M ߂6G7pkWonk2|; S{?ZOA ;~ hQ:L |iu:4u+]ZoFRk.g@[5ISWp_ZV ~8Rݮqn~9{-+/gHl~{;vhXc~HSZj<5f_Voo\2GbOejy 4$ҕ@o'Y1C~zD?t?Ef3 8_Zo&f_~x2zx7Z5ћ_a~X  taO:U>ʦ3jS̸cnq߅qk}+pK {_[]壜~zp.VqT:(*K*Eٳe;xrxw@NH !3yɕ qesqh6Eѷv~^g/ۋ ^APE`<_ .8{ E8{O HA].e36ǒ1ŭ$ < ;ahJм&ޛD 151)na䲛y K}+x6ׂ}*xdg8۰[fquu݂ vn_ïEQc듭].8%ASK>d:?*[5J,hMi,!+wi{|V/3 ɤ@IIƂۨ` 2hҙ6[l2#ψ|K bV 5E0rW|8'Q n x)ҿ29mC=rŢ_*ߕ_Y Gql8F¥/p`fnHM#{;g/_ZNi9f**WnZt S hqWL t7n>1Dg@Z˻-Dr!/~70Ũg2uq?&t  n=J&UVӪDe >\ңP ؿ/XB/A2_Cc;OmIJHpTu& ~!#$>ތxcOv]PS:^Z71EJT iVy5.[BvoK%C] rSxru6W{zET(W`U?Y/ՓZ^3'VuN7) wk܂J4ΦIrY7*i?N47)e/[^b+ykS~wѳtoreF2&H3٧I0*-{25 A}Ts?2rVHyAǫ"ݠ+Kں^VN?KR`0pIu&en%{lG {Ne' _<:_&f*˘i i s)HwVImjM;*JrI{J~|hvu[pתG~֝: Hm0OZ=gI/tMuvA1j#џ{0j7@6 6nφ{K׷Yvgfi@ ¨ՆO%Zkߥ[IzTt,-R֊7ލҋI@oF!״'I$7}TIܓX5i//YR^=Q^Z1Vоpȋ b"uSwV4 ¾@kSɥk" bBY34Gj ZP Ҥ.BjQpn^[:Z[OLFSP}{luhe:(ΉYeztR{| mPbX3#:QF~)x!S\#??Nt1?Raϕf_~e1UT;IcCKQDkƼ&H\,Y <( sZZ4lF7[+zzQAD=M-Dxξibxh 1W.~gxv6ogz~f_E#vT#kO$kfG+qQKQcY;Q(N0>>ݿ_وs '  FF r ҄䣩qQm6C$:T`;>tso}~O"Sq8N: 7raD(DRجm,Pg%t& (N"ÝFD$ 3`e}8=lC\-t.5"W7F7w9r{OB39 7/]ZڛF"N )g JXD*j<INTʱ`P,zŨU$  jE}cP+CO@y4:? %)FÌhe+PMkW9iYe蠘#I0`"ϰCR*LJnẐMH+`=lJABEP[m 5D1 oYABz*qQmUDbM .^ӏ7f[ΤA¾M| C袷ܮ2mDo/߮b׹M'D,Lu+%dNclx"[?Q~ce|g/(ݴ?yq $:2B l&!q{ʻGפ  I`vꐵR06 '.jpԀ{ G@BtX LpVHu`h?KG-T! hpo} &=Xt&B'/[~>8F8کKPú> W{:|93Mkk9`)>|vb:~.j 332#xqPNLǬ gcAt~q ্Z[IJbUU%*=7!\ *u! +LWJEe1*巓E~P^3;+T`T8^y%fW^Hǻ0WӥT4~΂v.pĚ>#ǵXbk,Փ$.EyK*у)|~dG=~i~`'7[Ȟ;;?sa2˖T>u6O\% f[%J mջ{;n$ܞS9S%tw7W(Fex3,icxVS"Mó6:✐&}ǽܝ[Va`Dc>L(dAXÃukf—a=n^^[rXBL(M*iԢ)B.۸q-^3I˳-$=:kAF]pTYWչ@(h8Rtڵ{I㔰)p*"1 ֒ʰӺ~&.~ή>`F3)Lkh3}R=AșљeLzYIXi7 (f+ 3esLN&{ֈt>IhxHoŽLF,Wnw{;vTog..W8=QlY`Q[`GëQCՋ@SRU"** f?޽\ =+UZ3q+|ļC[8~Yv;7@fVsir?QNZl;U@D*Y3ȧ"V)ݎ|q>)FXB9o\Vw;J? ksᜉߣ.)q;מ{|0ܷ&mdeTɌk3Όv>y4g˨'U Hyu #:В6$9R7k"{ZyD⼥6SOAh-z$.;DL/)#"YQ7ʒoUnOP/!8$^[x{'xJ W!Y`$fxXBAޣ=㮹BeqOwOj~e޳[+X޻[ȩ9`S>wMgR\KJ /dm;( w q~3?sÛ[coI]9/o,Dal`BڍѤ Fv S";_~+= e'RA}?Џo$^M-Vߥno؋H$:k)VV2zcEr젤? oZ̏LT qi/:_A2sDhŀB[;=~~ĒNoGJB{t$B }_ /<NQ40 $}R)--MNIT~fM~O!iJ(jŒ9X䍀F/;3' eT2i20Ψc)lnޟ?hhϗ>ϯǒ^|/1 mdC X:X{kF5H) JX¬J)31X )h #B^ TKF0k]J.\*P H8!`! 
ނ-E樳 iED̲TsD&38 kM0L8k=R/X~V<67f`7 Ť|EFg_牁>34гKBg֩1cLag]x+0;7_d^G#0$X/a 7cPZxHz"&%~U3,^;f ƔRb`*FHQ Js#6{,yn䎀1C#Z־NNhVh BMW`qic?Ym?N& UVE*_?J%OtxMm .݄,<*ݗHQQ}+^D 4FWšZ7f dHp1q_#n: Y ?)^AW ū @f&8IR^(Osmc,^ـ A,g׃ã=F/ibp9 U0љOrG?̿/7nʆ޶\m銴}7X.}?>]$@P/\@^+@-[}& Z ػ6$t{0;`Kl9i,Z9j̐LE {U]]U]BE -"TZT(&8P.D΃Rʥ+8N@;dZB9lAL#g *uAs2 $ ,Mi(@KɅlomnhP-lfbσ8ۑFMB 'f֌vi; ^}@gD" ?Eމ ,H!LD6z?߆" .FKt%y)Ưnހs_ ~$yM#Ο~ .cx 8%:'gđ^IP񊷷E&P o^]'9#l;͟$TKr"Y>J [?v[lΥ-V8xD,f *h"&R`5"4 GS1Zb4g _N/>Ox03/`Cf{bXT⠱jĈWprzX=)\|t ,Sf7d"J>mֱ)k Oi4 ۏ<,t!װ֖~lj{<65yx[SdjkJF[FULtϭ w~޺=OSl=MG)"(^zwmaHB.Dy# I`0 u3KÌ[D=򞸨v<^xyG" P-%ӸVQ7цoNGS'aB0!^uw(GhYI'lGY{Ԍ6Pݩ%-Ac{[DQ]<)ser 硟YR"k0Lf /^>Yf~uq(N ^ u ^ u=XO/]lєHCpn2a*wކ'9 Rd@~(>_TJO?njjWotSX=c']ѧF`ƳZ/IU3w) jߣSTnpA5,n](}ǜ4ip? Gp2=Uik–Dix~~̕+2s7dyPv.=Ӈ0rؒEH!ǪvK)hR rDMOMO4W!!s=X@D.Np:V wU+:ؔ.]mȼ>xoFO&OdAuoݒ`O[qҐkzzYc{)" N%^5UkWt=gm΂w bXUE0cHTeE$*m F=jh(z;L0wiT2Ϣ̪k/p&!^h:-(FG|v|9Ze!h(RDa 3a((G6c ܐJIQUEfS X6"4 m!DNXs+@ )X0FpdG2x#e(`vDDضBj)e2JWH'l'歷ݧMvrR_ N;0Q[OV9j=:kxòARl‰nPō$3FVH2 iu{0v'KQ{ 0'q%m[J|k-FOxiHdᓥjm.. &?O5~~ `yN&wxI1!v/H )cF ` hgTCj19l`Jѱ21U ŏbߓs^ MDcS9fL#O\/]&p1\xB jŻEլsA:zxoųF+EFq?,:^C0~sʋ&߆z8aeΡeN{~ _>UZHx?E ՊWðp$- ;Q9T\ ՙK$tY]tB%@NqZ+3E0ru)}$,Φ an,54=q%fd ~;/jLfCӯ>ൟ."Bz-Tx2]6?>EٟBg߹5=;X?aP)A 6;ͯJ 8B`k+բbY<J0Ƃ\p-W4DrZ ~=^ѻB^,w嬜$ZuvR0NZɂymƘ":Ю~ޚ|_9 YoyB?S{jcK-ʍŏƲVnlk0U|?\kϟh{>G}ٖ k5Á˝;QLICmj_}uJ@ QZۀ˞ 4!NkWyN[JX9&ݡqսx{j CâZU˲ ݜt&RQBx{s&_ƘX@7DxgvZ???K>š68.~xYE$"vGemeXq6LW],/a\VWLnȌ+0yu;?—Uw) v0uAf!;1t{J%v-'`meJ?D#QoV^(wz [5- N 4^& o>xYgؿX!r\7/&wƹVWH~00 G9"m@-v}rkH,DEUJBiGy6ҨVz1q,, #ˆ"-ç^Zk[q r}f0|p ,*}H$R~rW_Kp5'Q ,Pg B:C&5J 'ÝFDṚ%/ii#.Zvh#AyD`kCFT 2QH5p E);g #r(*ZQ:P"3N/NHB8w)Ds5 y~E)U]*)3z): |3|Om_̦4Yت/hI2̛$@]io 8hS)۞Ֆg ݪ8o9/hm?X%&:PrilNKP1ޜJW{A*:K 8u G)])242_842!$-]l%>qȺ`)d#ipࠟ޵q#_)ed #(@_vgwo)Xj[ 2oYua&3eNF yu젅ELLK3Mz <4q^X~nc|H%eZ?)CХ1ȝe.PZ glnہ#=͒ȧ}FNHm׼0{fY)-,us(-G-hF-ƻ0;R`cs Uhi:3ͭB?)4eS)1ϋ_?E?L)]~7Wǻ釋Z{nˮapEZ&T`z5aiBÙ vߤJ&UB'v!Rv^l%x 嚧l]ZcWsp*GFU88}Zg)69bp׬BgBaSA:R4y-xQh-Y@D`QI80坎Kfb{Ob  `- W.fq{{}>ha/__.':"Լ JGq)n;jM.'E'l&t2 `@ `h sO$Y2$!xHstKjM7k#O8a \ƣ?'wQyAQ;"Mf4o~.:gX`U]ཥ9C#|~?]~uw1JvGbV ĿYo#]3vq5GA#[ٌ>E2vfyI\2#w3irR^z{Mf2FKvƁƁ#n5G{`FNU7\erFcZ(H5A(T. 0b.(pb EK\8`_P Z*tN#koӅGQ6hV՝я)W_bj#r4W_ _fd)v M%ۤ=У 5iޫ= ?}t`tsuxI/3D]WpjxUlgi|Cp9>; Q(hۏ?ް☍G̏#Sʴ\W0p"%Cc)olnx{!QF{! n_oEc4i^|OwN4X2wvH\l"^(؀fXRJN]٨"P mYŦ*S|655h}Q}~"nB$@G5GT0RWwwf|w LEA&켭J+)s::w$w$l@L0$ {> {FTc4IFVHtDmm 7JK&d, t9ԵMs1 o/ v @G:0NPT*RW˄zj֜!)SHKAo : maNk0F;-ÿ7U^9M~Ѧy??-/ _'GHf&R=me{apuSvd~ ngjA*΁p6/tAN8-. z?]%g=J؛flSƟ )`ݵM[E+ ވp#{47~,HT^Im)q' 4zn^+~>`%䇂|QD9/ߋB:kL!fHQձʏl[A`DK1hђ)5`R|cktƖb֣%S?K3LB۲boj2pH)g!G6&zj|sV-l/@a{i3S~[wgasPgfib8ZS>Sm#ZPv,/lwȎi(i;k2聝5mOM[Zss].SӇֺm| l'f3yQغ\2rBr;xuv_={rz=3aji~h_ʤn|>KqeӨ $աQAr8"T<=}Ѣ4y=x:0xoN]M +  TFTca{;HO ]\t HQh,d5x % w)-Όf z9iJ DMjBj㌆`.A1kr @KaZGBUt`x/I S$\[k% `mTv1X=XVv9>2D=iV 9'?ok/1ڛl_XhB)0z|߿\^w5}FHL`gga!f?ݍ/ a)eQ|թWOo”.Oo\}k% $(SqUWyZNJHiU퍭kI -?4O# q`upjhJ˨*\R MnicŅl)9iy,]po'kxiaHOY\&F 5Br2I4O/(?e)Y|v$?pSQ$'rZ(%%P&K?K!A)_pEt؝P)̈NT=۱?'NaFZ!=V ]U2S-m-6DW F qfUҘm!꿼V-0K![jr3E"@6i@?S7w VlF؇w݃`dj*hܳe[\pae2lK c5< R;Ah )J.wT7Fe-4} 1U*bXRl3*@,i*ݾ0K1xE_)1չa}zzU1miJ xwa#, aKWx"I*6zۧǨQ!j(RMFL987S>YA94 !H* T EUY.r|\L{ln"}1̪A&؃6 :H Yo/3ȑ~+ 5Z cZNGJ:#Yia**cH PkkzJF14 )ШO]fYDks(_|RZH3Pp J#WO{ReȕmcBLG!(C@BujO`jPhS3V ;ڐ Rm9sQ f Ϙjʟ/Is0ϷYl+N&r\7UjleMS2 Rgkz⛽{$obG_76|+oncvO y}~pߐU LbJ<,XIk3+.#QL)Z.|K߰,]_P_\EiCo뗛0HY4E\ݟnm.n?V.n?,.BqCXA"J36 ~`^{Q}+ĹS#G]?~P=~;1DxX#O?DxXa2Oo[<zW,?ŭgfk~zNa#h l6ttwGLJk ~$X9{>P qYǭxGYסOM/9tZ)0aFP5E`3s8J0 5&:T` uRWT+0JD^–+[4VbǒRBcP_xlu%^! 
) sR.8[puHq  ʂcCA%H~s*$HĊ~TBtRU%I/Ido\ F΅df_;x}5HͲv_ NWcRX|)l.]X14[Hղj \ gɼXn@gk'Q:M'c|!# x+PTraY^ .d__}q.}p~wɃTtϟ, fk[w:NCϟW?x2* 8u&- :b cN(sWR&`:KM3&QB|,˷^sr=.ZZ8 vVe'Tjy.G!ے RؘH7-9$6NlL.Z#TV, ]%%\%F 2RmIC.,:c~[1 & q!u#LDZOÎ43R8MD3+pa[v 6Bix `|sK X(8R0)`K)є3`L cxY}#,"8%Z H$f aaJO 1`1Ahbs@+J P@5N;e3LfpBbeqcg 3AfH5hhUjb<bk$Bc5DLpl|KРȭR썰)IJe."+`(XXNVV*)%z!:+d`[Z4]aG/D*Ӆ'}k2% AXw`Zi>H-wvs.uˌU`GT|5jkB_l:B"\oy=ma*6\>t\IA4aU;;l&GV/f;_s=o:p|'i]w>Bjr YVMQ ]>W3W)Aԋh:"* &/HSt4M'VV^ox(В~y7 [W"7S]u "?^;NMXݯv!@K:td/~CetxG-ohw5XP=^&a CRzBs̯g_&yի?dŦ=>qr\7t .t0 q<*Ew,|Vf_^B;da-;2 m,o.Was,K]kpYA;:Bt5 Wa,K^kpYHAu&N |t,'9>0G*-7GW;s8G]wwXntfO9'(Auu_s4@9'(Uxg8Z&ɛѝqggo;T螣n*|ɷvQ.Mr۫4RG^zm,oGS(V &'vbhNI8V!aD`}By*$Ԏk._B]õ ЍiZiu1Վ1kMNZ5k.*C\^0XMC>+`քyq3L&=j\s^8ɣ%Mb`ғ}zYfmJ0 h2'&1!ͥG;]&#p%dD6ٮ`อ-jH\ 0iJ)n̅?1F9Qh6֊j*'ShdP54f{4:Oi݋x(LxzfR Ǜed.{1v $*.m]*abMbUpR*antuUl)=;®(BՉ"Q8mEȢBLs=h*%,*oE~m]7SQ;#l%Y 6L']I ܬzf) zUj:xV1%ZꗷoEJѪG@=0y'@1Ĩlbu,sEV/y_G .WLM74.T?P loUAhfFsG:sIv3߉2v(~U0 $jϋ`jE %gLhGzVgdeKRĄšŠMQ95%ZTHF%iҦiD4k%pݢYTzV?[OEe-S5lU1d0-Zb*Txզa@Hգ:8ڪ>DjDDoMq`<r^<]!g[1$듥q6Y[kztE@B PUݺ4ۥIBmV;\$ Gbcuȓꤗ>^A)dzfM]*bNpⓒ 5+LuBVifJgnY !zi5[f6 p! R k%&Pȡ:I= 2RM{]M,&aِe(j0TƉ$OtyyBKYX2Qw׸$SZ "͸8]1'2;zψ꽨`G; \vOi-t~/K2 sѷj/T؂*mR-A5RJK3^JE5JIIIԨ(Qjk1fL[ &0qllH$3"x # *IQ(e˷#?`-^hvb}4_%jp/'rK'3^wO^#Q>2F6ĦƘ%K(qN005R' 71ϬJR¨ZZaDI҄P L'4 ڄQdp&PYjoj%$- P%D _ιݤT "5>+&Z RL`f)tbkJcXl6f-zA YEXc vPCY m3)`9hƉDZoswvd2gdm䦃n^3>wv"lٛ`ͻ؇|^W,0?GK˱ ^lzhYv>RRj @*gdkYw(jôAKT9WB#B14G.v|2N~[;%~{W㶱e/3o^}1l?`q(Ŷj' %R*l%@sjV=…Dhj:{Jݻoܓ9r۟%ol>䩙 ԋ W d@G_!8/U~e4)p1B-cbnĚtE}j$>6N]ay„ i~li qN~ܗ R)*aӻ`5`dQ?K[ V­ivcPXK"UX94R*#68, f1hhaO [!V= 99u"wc~r7L|3X|??ß.JDlSwٵ0 Y|py$. 3`$Mul)T?Z /b" w$BA*@an[|^}HtlDuJ.ia.Ix\9}:PX}) (5엃J끪,R9sOHK,~~v+C꘥vOᰊo'}pH`1b??5BlxRt?epz?|65n HԞ{輈fjy|C\!RR~jd/H'թw]w^-욺.,|_d?ûPCڼw]nvSfP_FãPiD E04< s߼1GXMkorBr^!MAT6L# db e2}v2~O59m$f0`Z0%:;0wOfE] +6D %+ 7Bb\-@0:A1 oGtqr*SiɯSIHd$U,2"  BP,0+DB n khC%J]nbFT\g:  uq;$K0JE: # F(:P!$SЬ|Yb,1DQKE4a fL! cS1 br$3iW4GOX*Jw#7sXH;tg#֌~JUNB)8}$AZW `E%BBxmP2Ih",3nw]X0AX.e2IL+{i4KP iJ#}/ V>ʼnBB!H b`U8t>lm¨4S\S[SM wO2.Bj u8dA'Y yrDn^fd{3rC {Oi)/[ׯ32M '(XNAdޅ =~wQ-^wO޷3ױ6BI0!ݶD_UPoF/QzHoS|+ɖ-sү^ݾu@t 6m1t>zsrlBA4'e_;IuQ}- \f%Nk[9C8굟H|'I)V. _4*8݄D{W4ً/WxNjBDgwza;Ȃ\CM+>H.gɄS6f,S)JۉT H5tu* rUI<'to3g{sƿ8evp8ko;䅝߾yәoۤ<}kuu黴 9va2u:TLk"96~4&*Sm-3Qx/7~STbyA - J.^+ı\&$T'#Z*QHX$<'!yTE[c( $9u¢[YE)+&P/7$)Ɨ@]VyWOJMPnkxf.dH)'&TNX$sZ9J#;4-xmIM| Lm2qnK*ߟ]Hw˻=ϟ˩"1gU7ש$ET$?ΞȮJ-녿b [4RCvpIڧ }=p7^)\+xO93hPHr)ߖmf(FqOv>BZKLGi;>k)%k`]c6\UP]~~x&n;w/@b% 8+ҫ _աr"bkoI1juQ,}`Ǒ{"/fs6\8Ox}sBf2JUzd]{~%痿x50\:Ziw BXY$?on>7@d7sA۫;ZGFhjj‰u&xBwW{}{>߿H/ @C%씄nꢿXjtdNwNf-D#-Wg18yA&{h) ­!e^H罽Ho.c|c ;2=^rzTtscw\|5]Lvyu{m"[>i;, ?K>foy`LDB g(oQB2F/Ni6Iʕ G'jxekh~s,EY0]ⷅWpw%eآY~]Gj)YrbC(l)TOy+|3R2)Ũд;F'iҧ;@͸NDlFݹ̧VKͨO_`Fe\ jws]]z#66ђU!Uzn5! KXWJOw+(=u7Rz*G=ɭyQ|[(KQSX{AGs&oih',|O'=ZڸSKJ3GB npǪqv[m:!݈(n7o2Q,~:+G*w[Kk Xf Lm|+#>OWHq8ŃFfd.¥X_i o3 F-H YQDaLTwd]*%DO *($+y˱k g)lKWRie  ܫaҧx^9z\ wTL Ӻ_ z%.Վ˰%)] ZkÄ]BdͥVQYR6[3ewvZm[L(dnI{0Hޝv+A.IFT]nq)Ia*WZCA{_s- -M 0B64$V <' *I4Jd6j<30xЕИ0xȒq#ECP%I,M]F&J)ad+#&Hš:NQF41(!ATĆgLLQ4E%S)*T1F[cK}jz|Bb#CD qAl1Z""ỶeYxuCj:˜7JbH2a%F)1d\ƐDȊ(d&IHc!V>)ҤoA"0K` 1L 1A 1qlBXB)L!`c ]ьݑwoKh;ڿWSr RǷ߿rv2Jhb 85IY(}Y߾y,³3=L㺨&h7a:,Ijqɢ.">g~ȩ8_"]Pd҉ 1~"XO0U_*2%.EEOgq2ENd;$Хdu6CC'WbKMKM_d +oze[vUiVek򳗾ϽϮM~F;0'6$(|KJwc\-'"Y1YU[wh5ֆIBaqұ`" "͊$SE!(VhfkĆq"*ҋ94R*#68t f`0 mYb82TRJt 1bga;joA@gpA+1a(7 $<0cHse=i >ͥ//14-[b ~hm; |xraUPmqz8ko=z;kap^9ꖽE#r {^Tl,nR^1?͉?U^S7z?A&A2d?{xA5&^^LAJLh|)]ߟ{egnʺtyg7wH¼apNH o|J[ǂǂ-&b+zTQDx º4-~ Va+3I-̩ i}1'& ש#066& P#kC Laz<2[!LØ&2 Q`őK ŪA%?2?{Wȑ wE7<ȳ1E"bFu,VU%ݐf1+82##|pYad I *AQ& Z@XCS(F=Bs . 
R o8q &=%Q:eJaG)_R0Ym(mhg#XOx׏C\Ғ,QOYY9P~xADO?!oޟn6ħ"ϗg).ʿ#ޕߙ*;7`ND6ڒ RZӷGחDs*)٨͏ك>ePMu%Gk]Zm]\9D| =[|:Z4ḏ9JknP0qwB0l%7Z$.P)N1B9LU Ί+-TUu"AE ؃fԛR#T10"2`zKHekv8mkŌ A> /, &5:J%ID8 !4)UhF݀Oe§ΥaFH#2H!D'-Zӌ7OU >iv2ND3% vGI21RyI4/5$Ru0GcaJ+cx-&@,ec;Gi@[!&PZp2e3@X BK'""X".˟d p[̬ˉ[bf@ dP s5ʍ9f\rI5%0-ߤNkRV8ƽ nT#N\1)b!ee{x$"!~1<^3_}; ݜ(Z>$Tf%(ޟj.OĒxoU}e(v;]Q!"]x^ZtSZ 0K6x~52Uf\QzK㮕vwEヹ$?sh c:30pojIMڕ(Z*(g1|~r=xmC< 'URssKp!ŭui6}mpb2,>:k )ׂV u Ёb 56xLDӮg*L{~:XO]TEXYgse} l }"z I \emJ'P~)Ɵװ:Z 3r~=DScM]x5b+ ${]EXk >tFztcSPOoYԾ*BS'fqt/F+Toj"/ ҏoZfpW lGAC[$ٽM7sv&׃@\Do`=xuF'{kW9s)rHnDx J͜ž:j,AUYɁs*$$gTkP$\y/g|x0H9cTS) ;5Bxx/UGYn mc.fVu_ VA:~62zw\C)8eqgwn'kɚYJ,ߺ/=q34kGWu~ xG#3p;-]CQ)ݦMmv'No{JEɨ#?!B8bQNNɴv/dm# o NAsS6:EcNQ8xj3]}+ B0фhUz<`kr2]Z(BDO9T0EtTBY0p =4#:3Q;l67* 8HCqS%\N9OE-\i$qc[6-_,1 a%:( FNq\}$KXF8Xg*#rDrmHۢo$ -$A+䧃tPR+)~=sS czU JRE3%kImerM);YOR0ڸ1:R-|٭qqkO.}wa̲~Ȭf Yl04)Lhm20T[ :<Τ?bЎw UA{ 0@"y߷Q/uա];W_N"}{lТu6= +[ϕ0 "IPG7oP>?C?K,fg-(Uz+(m>Ѹ{cZ)[+m1zJu6^ˠb#'D*XL"s=:SE- [-"€"uPYl0D aw:sEp{My&Fa+٠Z SKeG[*YNV:ЀJ H5 F5/TyN<=I+j"gjgת3Y^;Z\㥺3eDo%y>WM&3$Kx@tCo'/!tj𡯢>ykfEjv\;1"оt]]xPC7ssGo/s?sEbOa LC򲵴|ΏV2 ʂ $ߴ*֚>~:oW?1GS-ZuzrVNL%BkIh5--&*YJiqz jO S$~:6 Fԑ ~*$a^ڏ']p)}Vz2SJKM,,`B{ -1X'},vc#!y.=m<`⪯ֆȬ`FgqVUQzlRuUg7 !@3SOw KX!eF~'gn>L>g~`;ws2Y'&jui2Mcd-1'o]1= c텳Ar\a#R/gaW 0c0w`r:{%KvBe5djyEl}G}_w+zKrD-Y hNB9>4'kebwcFVwvk f]hiM{"NTtO}q]]_ٻ6dW 6d{Fl^ 672%"T4 {Z2E_UWWUWW1š#-G 0OtfSNǩ4O z&x0 Ow]AZIg JlQ^2uN7|iM$#,c x 'MoB5(bUcyqEuշA<ÈJ3O@<ˌF|00 8/.Ĺ0mxC[9~:AҨ'݀P)hKؤ&7T x})tvNTuSD_XtRV8@&|K6l9_^Wd4h~8xϑe,Uݿ`;ZMRGIh5I&V7bdHB `SK)! kcH*Doi>V"Q, TK;ŵ?.TAbW FCj,2] !{s:uKr7V?0-@jv%)BjQ}(rA(۔}U#hI>iio<\U)b헍URuQpcs(U: Ý-՛fY/@!Vf6&D)F4pBNhI>ឤI:ឤI{: \:ohK;-lTu$(+6( h7p23p2[)% Wӛ%5 NpB#i i#NhmtT)oa6,wJW ۅav*ϥj.;觻)gyuXc#frO$U'):IAI9Z6,gSbތ@k[(a6oFʸF:+Bm/w,/w $יƓiviZ1[:j™t4PhU,3\I!DYxfn\޺N8`mb3u#ˋ:%\Ӄpvn ?~)_%7~޾ ^7L?f?]Afr3Wf'g/h _#Atz; o3vt6]f3|>XA[TWBO>2c@ Mk6q~5d\q9戎`s{rfovt=̻>iaXjDc1Kx& 4=cJbKwq L `yZK q>ڋ3ZӶqXUGGH\8%\A1O*ZOnnkq2ޚ{E9+yUM؄rV^P#:VhRXc:w\Kce52hPqu ǎN/B<%C (Gy3-aA5c;RW7oz,_2[)+k˱7Ff=lI&=|>)xWD͈txa RlAuH" F0?(|>w;jJs3GVAXT> BA{&J":&}$&:((ggP +==M *<.e Gw. !M DDe!vZrZɭ3+]9\VRҩ TN ^U+ĺnR1a焜0͈sB!qQ9aǪjh%D' ;F5ZE*p:Fx"'w2 $dŏP+0̴Gr04lI1 `8Q۞D?h%A7P,~ 960ΓC*M8&.MH?B1SdIEVg$1c>9րzp_MX{R5マ B)tުr BJa^J&)6 WNG:g8A/(D`@pc@ J][ͯfKL0Ihq3m&>kv(1}Hz죋W''X~5SNGR)Ĺ*m)W1Ca鏬VAI{m ,Y-+JC]TR1T Iҽ\jl4zec"xʋ4bU F(4ll77%QPS/F9g4kx+_*PZluMycEuDWO!FxʙE#JL*E"50@88W" VksDXXA,L#/lr0:]FJM#1L E@._8BSV= ^se-iLL|n~2yF5TdFsV-"UăU[-kѪas T= ES$$B 2+DbC-F>1fphfz4PV {+: |ar$esy(*;GS]UșD3)%ǘ(t䙨3GDT*_ 01!5/w%+gPS ^RPP+ E P غ `RNjjBsS-3F-+ʬ XL-7͸<^ͯOMoasHh uQJwȁKohBF-ƶYZn7.#޻=wՔb?ҿ[vˬVJQyw._Zoi3w]:v1?8>Sm?L?>QMQw=N~yp~F}=7Sl=h">,=yj'*=8*hxk$Ze!G3~n>LR<ڄ$rc2.#b6]2O Sd:_6 o $5B#ˏ#u#^εy-fs:]a p\&|y%_ɊC ?]zj}&s}n`VS*_9cOؚl)/Yw-&iU;؃]oIžy|úԽ)mZ.Sբ ?s@FE ?:0*m۶6OGUdž:<Ez 2| zL^TJy}m* |u9_OoنE5N/ŗ3~eڜ q. k]T?W5< QK^nz $b_rO؏q;~ p'ulQa:o(Ų=~*H3}y|p-#}}QqE~iADrg]J+9XٿeZXאkw饷V2+l.~zS^kSu_hU w,;w;8nhyqC{ƼU:<6> UA٧ͪ4/t`KK8cju(+c VS|>lFI=$&@2>tt# 0](r3b`N@b0gTH]-#rih|?ѤВ0}3J*BR;7gUqOR)e`.\'a?i|R5:FSlt h<Dgph*=r4̍J{a#>Lh;ép[ IxXl)+ 6:e鉌2"H! 
M)hX:=۶Xf={#t)F_7B5J JG(N5.7ePY cN_̗VGґ>eJZ `-mDzk dÊw:K`#ߡk!FYW}ZTITM$a֭:aܒҚUA1ӉwLTc~-/XIotP2M=ZS5h:0jP5]ϫ&n eBSwkkϑX65(.j?[΄vV˞@AJD5N͜&9Ȋi0LH(z}N@d1h6xO%%qNYU%UT3M")LIO-Zi9D5QR1#)uޫ 0]!u~ޅ\Y{!i1-" nLUvUyî*Um:/}yCy9v<何){ ҂Hv9/yǷBڐᐞ˻6 +'&B L^ܦPg6"GGY@hc ɉoky ٫kY𾋼ˎfm!!nOf1B4涣=[0& wA0=h͋1v 0hM_TCGC=AhԸz0pgБoյ='hewQкR IeƠ U4*jrBTL$*G]vr;5S|ϰ[޽Oܗ7{l2z̐9>Pٻ_߰-OgP3Bgu?uO_~2>.+#XeD9 g$WU}错k STZ4߮ +u}%\mXJ)3zp~M/U$gU`l;t+Ox;O=gZh13ϼǷ?@ p07$\=|<2}p%xg\%(1E9A& Vڎ@9|ŇDҼU"iûI ]w1#z0:Vh[%Rf֫ J޿}*{ yэE _^#3:\/o۟}p[n G[\>U'WTV r4(H)(6NHx~Fo?5A4hI1nOāIcO2TgtoT<7FZALBfr K) ͙rXLY * &D#HTExz'^;U)e,LT6*mA/*⢉!T4"2 h8z`\BY!As-UqpᎩ Na*{( /JʹzVIܜMU*h9B@3-g -'~Jgg]zw25UDl1as f=R*Af83ESk"/;Tp1!J;9߬k-(1=Lwo8LiEh:ߐ6 |"Y?q 93e6fY̔y5xVYLB=o( ids<.]bB#ws; ȡXZP1T؉cLL둯-΍8 >Ho5E˼V(9bFxA?(&;pK #sK뺖Zӣ\(}0N㊌*-(g@fnPapF"qϙJ驔,`*" zO' G4!4pJx%;rDz)D=) uVK1ԙd/݅JPg:fLPwnL$%ؒH^/ZN$ԗV n cߠ1ѕHFU+RY-&O8PP:rW cRl[J ñ?$?ոyՕ`DI( 6&Fw:`|%I+އp-x7&J116xuݶNk)C{z>,䅛hw c?SH~qb}3Uړnk|r^;utP*V 9͓ ,C?|GSV#BYs9;)?'z214X> _9ʫc^!nؘ>K/,G<\D S Ub$4U$|`> ]/ )  3cR!qm=\_eA֯|4o;휰V0 ,xuSD1n~: mLr L(<Ӫ b hvUTC*_*lť}e+(QfܩGWcýkJNIC䓎IۨRN7mITZ-ŅPf<5}Z*vZ1|z!= ިD`3atdtfq" W 7yA-{$BR| qC|9Elpho\r) mLrN ?B|eQˌv(S°I^q sC38X>ss[ǹY,@qcPa9"ʔ0#N1vUT8ޜ[0lZAL8gGE,EA@ǨtZg}C(H7Mt7^1"aޒZ“+mי Q?oHO7$ާ 7vf]`,A h_rϺ&:lP p*-vOn[۳zy;pV۳U~s#(m}ʴy5lf\ƻfB97crKڢj}n8yM`%:r$CDΤAY|UT`=h]؀NB˾m.sK,.2sEils˘UbxAFVa\qUx@PW9UFtKm - 8BkU>J )P  JԎUۜޙ~I=)o&}E7wEoNV$aCs}YA)9m!+8EZdC&YAMMյz֝Tg˞Dӂ DcQ- >[Ԭ1혎ߔ^')? ЧZn:d~eJYH'ӗD~sZwem$Izۀz= tgmF^ekGO$EY%P`Ѣ!زT̊"32"+Ώ6$Ǩ\VQv0'eJBZks{9׶R$Qzu;MvY̚{~Kj#'/Z,ZAkMdS?ͯ[dh0*$ײfT))=҆CϪ0lVvd{sN9Qʉ*xVZ(K[TMLJYrkᷫeN!^UIAI vc1O)kpl”P2xj@fA🮒'_n_>/Ϻ,m~q/եX"{Ee{wyx!l_b×9hb;9;5=F;^G_q gN -$Y#B:W9G7'oȣ娌g|&%CmkJPKB2{$ht}ߒL LK{zPתq7 ꧨ u6rٱXausB!͐(J(+*Y' VFᤐbIh#hSqqri_^'TOlό:'9[`s-T˯3-rw,mgnWzsCg7ȶhR]3p3\.MZn4/7,(*04WPFcF?1s?1s\@ :p&Hd2L2&i!sd`EpI4A,1zN4]oF93Odo)`FL,@q0My{7xxUcǨs5F6 &ă/p[X`E%+`=z '~Rʵ.S\F04qi;ahQM̸|nqybttC@9`OyS̬>U%N}]ZfBoxA;El53z-[b.G wqZ5](nـ np$6~~i#<>QҐG+&}ztn'6U55ݾ֮0 )48i+63%I޵ Ogߟv6^=.'$jb :}TO}0+h:N'>G?+ׁp~pZ;^=TCB[JHB9lZ ,/SJRmKRXjjT*J"(`FZ(TQ:x"D[-յԭVqˏI p,@ VY#'G *'e  N'fuXX@:6ʩ4ZbLסb5{,يFF]+qk8 ddatVC9j.%QjL!) 6RLCIAJV׆lZ$|"$DXU-8TaB $Č<:i <@v0\%}Px-5ѪZBpƘZa۔̗cnNQn\"EN8E&U\kb-RA9"7VlIfjgɳ A9ihY|ЈdAStF_[B0fkѰmago%(쑟7tܹg%X٦ŔI0:hj/ALfXBMVGQAkmʾaPr쌦Ä A)^K')jryvx`FU3$ ^%.%E]6`Ok2i<--OYdYt۲#4wJI*ݳ^~D'U>rs/rc{E6q|z(utSC(ћ};wژ%?ceyYai&Yf٠m$CԶJ<#+쭡GC.R*rGV#t^7dニ.R|cUnO"g20dm-^fWѷFL\hQdP rP*J@؀)]S͹(t(v#qT?7mA|&#٣6 %S˜QTACBӲFs q7^\UuUǟVeK"-N~\_{L[\Po$x 7$rH'Tg7g3 U%'i U2ʃ|`*]'~ԹZ2b 4C{4j^-O)Z38zѶ4'Al däK`R1>pӞ6g':L1Q6 ) ʵ RR^2WJJQm>kud>dcV"q֐ֹ2ղLCRTQXǀV( uHK)hVwy-d$[ߠ7/%CRcdZK%ZjZr. !^Vڙ02jKx2&gأ}96 X\ƹXEє ]zwӠVpEΓ'q[ͭEp9?2kJuiuzF5RS!/gJl0ߟQW߿WU)rS@S%YX+z> ,ŗ{_ӂyVQ$WDo9r )7.i NCWτ.Oq񛉃o 3NAe>t#"(%TfAu3 }OV˙\wL^<-9 yͣg mivm fX2ԓZ:ӣ,mU;qij5h_:Rdmbk|PX `V K(*rGvNVn@"+9QBW](=f/_][ #>iSz.e[gQ[9lԡ)['t_2E\YI xD@Z3Etۘם@>ӷ78S^qvF_2y%Y.@FݦeLz}2n,ner0֐E(oL"֘58-x{VVAB>xMAqW9UcK=Kv-[ڑz9mz~{ӖdٴCO1u7@OV*T Yc)x=EA෴bbf(PhJ^3:`!&ܭtIe#Yn0ô0ֶ^ [faO]6 'Zݪffvmo 9nx۶ó<=x4ۥ쓓9gڃ=}hRO$wrG&]\7л\J#G]ېAt/Nbg|c_]5rזf<51|!cI( _joeTPR%j+C(Y2*H2f[x2ad]6RneH=(ȴb䈋Um[Y79RY?i)@2 !|+^³(bH6o.PQ)GTP 'uXTό%'E+T8ߘQ?E & [߈/*A;0;ݩ24=B P.!*dR2J8LjUPu`eFNE]Q3NZ8ɕ^z).ss>|VbJtR#glMbBai7Fs˲<]=,KFòf,1s d<9dƈ@V-n9heLizjRZ(V/? /`:'7HaHiL ո@j鶽F EjuػW3I^L ԃ-ub)f@rzZPA=*vAZ l7>1]Vo%ʆC7񏗮bsr)glPV,`._W7!v_. 
ҞX Iю$*2%OIW/:wa=K1fj4߇A_jĞYo!ܧ:"O/pJ22ewC<`&oj~S~Y-8~EI=0nyV6\Z=#Fmo|.vrԈ,ɟzMxrZhE!Sچ0f^'w".;䓶2;H6݈B aaN>hkMX19;7`My's_m#iBoH fDs7X(&8#ղ^xK *4pǹ3F9,&#a5کi5/ͺ@,SS3 {3sLЭ%6/Qq6:)h ,6hkB r7ul{/ޖbi6} ,f.F r nsj;ǴȢN/CswU'I0n]\tn`,=g`Vh`~H}xd;Yǃ'"sˣv'(Ⲿ K?~mܤ7)۾sׯO#Y=I[v_i#0R:keLe4J]MZg1Ԏ;-ԒBiwp@8z=}HPODrztĶF$ 0PtqC] la6n U *>@]27;,+2z) ]n _RT {Mۘ+)/hA@tjK0(* Ac;DxT#P}m'`{1Tٮj2C[Vi<ڃc@\ص7Ej 1 cK*AwG>0L1q)y,\Y\AiwwGo"Yr[r;oAgM|^d%2EX!!"- üwJz1c'2tiDZ(:N4?hOizg{]Ӭ]%q%Q.i߽"CYw,= @{{ٮTȼ"h!@8ص]^}q@1`5؈ru\,&GQqX{gӋ1i=v9$U;Zyuв3VTbbsG1 M *A9$_jQxcȐSPD9[(\]P!Ԑ4iT6]RѷJ%"ަ1zsmy5UT#v=pJpE86voתYz]#qKXShf: t9zxЮLNj>sE:ąj=v5[nѿQ0L%}(7ozзYAUaUXsw6g`}sKHٰdf-n)%-I8=c˝!3;cZo|Z0}ɻEAo4u gu*(K S+Ŝԉ'@b(Ծ 9)j4UNMHÝ Djǿ*E~B*w^c ռ>ۻG#DuS;DO[+e5.>wEG\e`QA7W4qw!i}6'Uخӏd~^yj"VxIP~?Ľ~Kry~ =̠9;3 3(geF5 yhv#mȘsIBI¹#_ ;x.Vϰq[Gq^_645;iɒ/jǯ9nf&IAMpj/BY޵{c. a񎺫=ᓋ6T%HbϯϹG݄^L٧#qh2j ޖ5F ^g$#!Mudt4H-ۑ V pT=ㅇ$?Wk7eИ ǒh6'@!Ƒ1/_"‹GQ|iA"}2ELpWʹq20`0b?+g=JG{y}:'b/(8X=JDђ6ޣycV_ruҌfcj.1L3:> ݞ'|Lȷ~[N , qRߕdғ./bp`F{)4)5i8 WUXsn^_]?&4޼:3xWOU:6Q@/jTpr/%~ vNNԂ CDe Qp,pak⭄L`ꗎ"RI"fNUSM bK %MbN#uW>|*IzuRERLcqcP 2u4ю2 ₰2 [h,QDP;搵J&HŘ3 ;5ʫ ʯ.X ˜6),EI,Mu% hƪT8h~2SS(3>uM:BMw.7mzdǻIFj\lKU2f_6m6y77W#7],W?fgo @gЫOY\t>]?sA|h 3!QvOn_ b>A^dowcQzS P}S)Vne巄3+j3GWV7ߊg5z] Ւ2՛@]U$$2#~1WBJSELRP Q icSoOCqBaT?r.9,IPԅ[f/(Ƅ}}=U!rBRNbY5jee)w#bޫ@p `{'%bbb$Fd$l $^,'@NyRc^TImzs4Ԃ}Ql9o`^P$\"r.'ݟOS!Q6A%$bADhh]ߗf:߁<#t0߉Ψ=e<(x0`T'(csT9"WI\?b'hC0Lc A c4uԁQ%Jy"iVbxbeN8aJhyFbU2H+10κ 8KSD3_Ij1aPSmj'#B$0|؟@(&Ƀ`B$g8űAa?;14d "A+Ō7 CSFs 2(@r4hdesPڤzx.x$.uip1j=fd>ϡ\xOO+ I?lUn+}nL 4Qҷ:ǩ ʅi 7K>'1ut|B֒J$3K1j -\8-R6M%F=0 c6^cTiF |ft4}%Yy|&\3b>l*PLhGM'c νYC>,V-g} (pJ3y-6k"Le~RD8&4iCu-5|^6|,UYTY6( M"HWKJ)OOMFݯ̦:-`t$C 4f7wƾF -qc{( 4?{Xp%gu/b[?EG e|m &ngJQol{%lwS*1 PP/āɵ"r[Y] b oOEBfͪxrzY~\ηU"y4j:U^aF,NjO62҅o03kZF!JN]ђw)(^'u'(jn8_AQXcȐ3Ż͏ y t D~7,~:/u07=*Vv~{,*^-#7k4|22&M*W1Z|UCm$KYY<|c2"00mX)מ]O/nO$OdؤnSQ:ƮjMG;Ƌf~Sm'61g{:dYtntV{<g|ÀeuKⳀPEV#T!u6B# 1 zJfUTپPjWYU  rhy)/l0_i/ si>JŮ)n9]ebJ y[v╷ \KгK{3zNBjO(LW!/cc˟ F{ vd&YRePZ0RD1r;Xclթ  13o'p"\'SFɬ[oqƱ86OfMF[y28J8 |w=mCRġCu$nuдw7 ' c6g`=s˳K8ԭ[1Ti@EILک#ڈ6oQvl+F3! &Tm>㓂 1~~ZFyxo˝KyQƣk21TTzb fEV#K[7"c@YaR%%Uͼn :Db'eEV')rXBN$V3~jC-W< o1DpE{Bn7b=n4`Vl{Z y[Ti+XvwF(xW_B 1Nt\cJ|VJRgu N9/9L @{&xiJ%n1|?}(Iⴰ!ō%,I! Zn,e'I=$!"jmmcm'$ S{+ʔòG >+I |1>ȫlY 0YpR`w-vx}"L)cGG5._cٻ7n,W~wC`;`/1deXŲm$zA%EIU)VJ"sH|?C;|*ݹ}|lmqnr:j7]qx5,[㽽rv:)׏ٵ]?|)ǟmЮ ExQTAx)/8*'Qv)ʮo`u$ A@yn2S!eË ,FфTEфߋBZ"qMH 5荮}X6.0\UPgv 'B!CNQw+f>,8P!&9H!BtNs?rNVq9ArV/&\XYxmlZN)f7#-QOr/9B,|\r ܈f Ƹ, m*/ m#wCjĻSSG\E(ێ 6jqO t냣^1á9sZ)}z-`u#_rʻ<=C]aMYH^%Ȁ Djꓐu,qۈ\ o85P+I$/1Ё{H7LMabP\06 95< D !t$RW& 1Иk6C1 {6s`̂WUcIs4|C2„sN1QϰㄦCL$BpSR:EwƨSκ.8PvㅬY ΐ$H4GP EI%,7vyYʘ&HP/2#Z%F1K!GLJV`J"*t.QhyRs|g _)nKy24[ZfMY2,Vy0;g\f`ʌB1sQe D<.+Y)R B/&Z@"Pc}|h>-f1[EJn;ej??TVߑwo ,{ctizklC "bg+~͕jgjxX=#^Ud):3+x>ۯOk%]UFMv#2$q#.0wm_ꖶ[_n%]Ŏi ZN,L<VG`Y+Ldc4X n:چ 4''x 1">J504}Ǻw6#iAA7'wd{abJf&fhc #oq@_ڳa2qxr:< 〖'fgP!yfg1 `e_|--2 H0>'GWQ`!cq1#fKv.(!Ll3gH M {x1|"g+:_݉4oסZe UR0j]Ζi _Lԟ?ݗ#$@5\Jn6BŹ31e Ңd,18@L 箹U+1;9Qډ;Lmgw8]<\jdTGJҧ`Tϋקd /P}zvNqK5=GUu1ϯ>zG U_"L#2vbt9HFU^صnpC1xG)ڑ=j 'AvL2t BRHq,sxV`89)6Fj*2mXR3.,4 +:|XXuC$%%uxLyGU\#5q J01hK Unw]B2&XeP2# a(r"Kb\"+HW53(䨯 D[ة$ȅ =k*Exs ;]hCOgoAU0!n>5"UobYFp!Ws ~/KOX+Ӡ¡K1ڢר’cT:wh]7*%lf)=3ySNQ8,!pDؽ!X b)4\gu-veFL\a;:R䒔\̦1p1DN$aL>=]jA|%rh-3#)r\v=qE7Q1I,峣$@ [ #D+PJt ;D Qh;P9Ag^1crdQUF8ӊc@0w\Risd+&BR/U(Ajhhb\D8ݳda)CQ CK}& &QݻgOOzJPRA8B*/WMHc,Ĕ &^|ԭ'):C/M/u? 
?vE!GYwN2d{^E}qgШ2* ֵ!M, b/LS@p(%LFJ%B N@+QKEFG xb;@_|1"MWrz [BpSڻ@`ڏYN ;=RCShx祔3̅ $5'$FdLq[[QdP6L*@fgP5Fc9>3lx9w_Lv['-[S?`,Z'[XkS:14E4hSm>|)3Y,xYqhfa/5FvJ Y ^Һ[s|?1>qʑE`}ͦ| 3% wq 7?.9RR>:R8A g40 "[6Mʒ>pgkC{2qhLxnR--Uݞr3lZ.{!+]%Y]ZQp.>/'ݽ9>a=X\:R+ w*t˪`xd̝f]c9F"کCaN# '01={(Y9p!2hFІ,9e (у 1br1@*(=]Lo8+q&ΚeZOW~ŕmS&&N= &f !PBH?$\m]f>+VO6lw fd֜#wdP3+{ǦMf)0J㺾D^AD:CyN  iC+ 9-`0;iЌ ղ7μ#wU` xBь 16-Ȍr\b]`E`U +V&cEa"d&\K4Ոc#l1q)8Q( ew @rBiWOuo˵wU3hnp:'EH˴}(O$ꌙc3ߕEx0O1w+_G|؋ + 2f{7 4 n6idȠԱ>26{CzAë" "(f_-O m rhg?T0.f 0୅i.B!;fOqsg7'W?m+gbw2=g{~@ontG֊HKP|EMvв& *DM !z'`1*ΕX6rP jڞ޶ڤwU3(\<[Kp'n/ӻzUXc O5xI|jï 'vS(g)yAP缳;@CˈvW(vlج6 zHI~/V`[l꧟/Kz1ZaS(Km'5jpʬ";JKy;ԋٲP:Wys88:q*8]R=N?ޅ#] G5CLD dB;3snID8}R?6V:Jd`sh~ޭo9%P(-xS }0N;-*,v>_c~'~#m'afqIOif`:fMi BXNK2~\$j$8 rD9iJa2N4rvDhbrU…,d"j;o? OCpx8BħvwX&wCo|gL%.s\*|aiav1/PYtVtc* 12CL c8\HR2]N|)-5-IJ?.5v#J3.;MGIXagBcTpP7֠튀RB2 B r^RNR"AjJR=->תlkmnƲE/3~ҝTj']$%S]$%$7_@mz$AREqqqq84zkdػaPPTYCQ*X^W!t"4!A #+wIHXCzF\tEWCIiUJ8cj)j\߳ RƯפ' * S!E ɶN:%z>}8\jjs2D ժꤪs2r+Lipt|57uO[!Zk\x)1oۇXj Z"}[&&S1^@)6"\f3Oܣtݓ@^KqzH"a\؜=%BFx cB[+ޫ*!r^6VheH(3q(Ѧ,.0'E  `Af0 `eZTp[*F1Cɲ4jJ$7)N$E*8VRj@dZ;>rtjp Q{,@]"&$< ux{屢]gNER ٩vWYChx#ڋZ"QNDÇHݝv[os6(F DFhs=mbdj`W ;jڢIBV`$tog=L|˵c4c)B `8HRJee0” ؃٢)a\n3p#:zOnF'&_U9QΏ2hBxK^> V6?=3\#@2P6R)[eM-} (hM_&-F im\ `$H0zF`q TQXdkV`Wql\6N.0 Q ۃ3F̰|'#h 0de9<%sO: cF"=i[ڝfտm\8~嚬qs)do6~cjeM3 ZJ}Ly :`E"n;/8jP&@F&@v6aT~0_3=;K^'hF*ga>؟GE:$}twY,ܡ3Zuyq@6Fw˛+kLϳãE>IFK0͋wz4y3-'\[Րc %%E|OI +)r=TfHhM70Rxwd7A@{G igN?#R"%)F0!)I4af LCI˓cؕ(.ػF ֭@BZçB~z"d/Yz $gOL~Z&它'Aߩ_K51|H׽ˤX>y7ܖ{P &1h-/M=FI\f3Um4JB՗9#3. zN9{o؉}.>k=&h_\@ y3ZOۿ>_[.֠_KLE𢣗`T?1OيpSc,cfZD@X/o/Ub2,n:fRn $iAS!҂B@e8o(f$7wR(DB4+2($3~5;Y~= h=TNQ M|;Jl M I)GA$B+MșQTJy]su$!FOYJ`6?A&32 e\yJ3s(T*ʳ"mXJzϯ67WSqqshR_m6>لĩrڒ?4Z;_;Wգqxs||l2wϯJ3v6ڏ~k+KLʓ߀oӏ__bXnχǻ͕&?#W߹Ko6swf~xN}"z0 p 8Ҍlf.{盾&x`Gg[gR@Y_UVl= `B^IJf˙Ty/Rpn|Ce]Vֺo5Y5ɒCe5ngӁ^ŝΥ2,`".aeb@gGfK&'aS zޓ;uW>83)d3Z *1:5)Ӊ xYs@ikH>JԠC؞&v7j[S؍At`8\B"E &) $H&), &@  T6QHC}r lgŶ_zt\~/UMkv_Q+t3WY!Fj@ OC*v~wԣCO]98 r mmȸܞJXs+<0#>|O3'Fw['ɦt2[2/2֗$mqdmIJv*SUu:_> l:UaU~(!]ޚ_l NpRP`s~>jh!Z`JP8N;}=4O6MMSTNSA'P7.$&\Q&<DsP\*s(3ьPq=z7V{jH]>$]FMs[X_g*U2?v+fv Rُ- 咥2U("Â,>QdSPHmْ,SyQvlzR@9h;S) #![u<!e'5)3f8[G?HA1iʼXy;2 I*ؐ!8[ EJrxfuA9 plqxDQC ߣ@'znj)57ЮY(+*Ɂpf95!U/Snʹx,sKį2c7Qb|f*j43Xwֳϥ8xrMxҴ\p+ץ_kS,[?6P2z1{?n?nh/my#n:{0]~` "(=ACκn1_st²_+kv^aaOA^1ũ_@e!CU 3| pnn^vbj58󺽊P®ε,k%/$ Z"͕_nD]]9!ͯA[ As Y ;0d6Hדkb(&_[+%*(&SǀT [ a|ꗛ,f>ǾN#ulb>5HYAޟ](?)lb2l5.Zr ~uB#%]^f c yLl%:UO/h/QG3@|)= yr_-'gO+)U^őfT: QS,&::-UF 3BD(ǽ_ b,;xA sbY0+M%>/ /P~T*}Wb?'3sxS ê/K5T#8o#eAub{DQPׁ&,y}[BXg?/ ZnS`Ԅ_*m l"NH#4FvbzBOsKNtp`K\Lƶw0-?5ĖCr^9'1c0%N&6&np Hm2EBjN7΁HV1Ęu  ~=+}Rj̘1&D~v6F 4ER(g H0q,hLuȳ`EuKnhj̻-h]H7Aribw4ǡs DQ{aĒ"zt7WcqPF¨O 9nOr)JB-kTpB1NX56&B1%[ M*~ n"eGxJ9X;e{ZD+tLA-LOK! :t?=A_ UJ /h=,0=z_ZV (AZˮmz8`d1\ 3I}w| ;Rs5$tXݫIbo%ؕK*n%m`#X =tB_U50Vj4a8N5Eۿ}וmg!>*R^}$IBE*(4K(D+ƽk9Zȍ^{x֨OgS|qNr3.@œuU &01^GJ0}W\HVJ8w0 ԫ 䦌+s/TWP9 *Lj3+)@tX'sT{'_YJA&[GldXl HSiL%Wn<,'rPMSix%8_8JᖄXZ%橶z+Hy2rXs0JRQ ),AsNJo/>毼y$0'#2ERAtqjUJECl7NQRJKcuS Eґif\,6bMJ&$9̏NUF՘O`g1V8ǜZ..aYDV$V!g&('KhTp&ǥU !I1UDTb}Qx|1Bs"PGH6fK ESNYI{0,FHJECɵ[֨ƒb,]m;&I~ 'śL.[L}rhjbzI95Ёc`N̨u.,FRޥ<)K{Ydd70&Ip s2R"#G$!Q! 
var/home/core/zuul-output/logs/kubelet.log
Jan 21 17:32:52 crc systemd[1]: Starting Kubernetes Kubelet...
Jan 21 17:32:52 crc restorecon[4675]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c968,c969 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 
21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 
crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 
crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c377,c642 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 21 17:32:52 crc restorecon[4675]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 
17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 21 17:32:52 crc 
restorecon[4675]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:52 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc 
restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 21 17:32:53 crc restorecon[4675]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:32:53 crc restorecon[4675]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 21 17:32:53 crc restorecon[4675]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Jan 21 17:32:53 crc kubenswrapper[4799]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 21 17:32:53 crc kubenswrapper[4799]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Jan 21 17:32:53 crc kubenswrapper[4799]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 21 17:32:53 crc kubenswrapper[4799]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 21 17:32:53 crc kubenswrapper[4799]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Jan 21 17:32:53 crc kubenswrapper[4799]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.675834    4799 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678644    4799 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678666    4799 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678672    4799 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678680    4799 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678686    4799 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678691    4799 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678696    4799 feature_gate.go:330] unrecognized feature gate: Example
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678701    4799 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678705    4799 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678709    4799 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678712    4799 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678716    4799 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678731    4799 feature_gate.go:330] unrecognized feature gate: OVNObservability
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678735    4799 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678738    4799 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678742    4799 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678746    4799 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678750    4799 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678753    4799 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678757    4799 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678760    4799 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678764    4799 feature_gate.go:330] unrecognized feature gate: SignatureStores
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678767    4799 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678771    4799 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678774    4799 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678778    4799 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678781    4799 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678785    4799 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678788    4799 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678792    4799 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678796    4799 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678801    4799 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678806    4799 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678810    4799 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678815    4799 feature_gate.go:330] unrecognized feature gate: NewOLM
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.678878    4799 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679320    4799 feature_gate.go:330] unrecognized feature gate: PinnedImages
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679367    4799 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679373    4799 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679378    4799 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679383    4799 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679389    4799 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679395    4799 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679399    4799 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679404    4799 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679415    4799 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679420    4799 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679426    4799 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679442    4799 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679447    4799 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679452    4799 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679458    4799 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679462    4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679467    4799 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679471    4799 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679475    4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679479    4799 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679484    4799 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679489    4799 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679493    4799 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679502    4799 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679507    4799 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679512    4799 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679518    4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679524    4799 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679529    4799 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679533    4799 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679596    4799 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679606    4799 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679617    4799 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.679622    4799 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680247    4799 flags.go:64] FLAG: --address="0.0.0.0"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680325    4799 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680341    4799 flags.go:64] FLAG: --anonymous-auth="true"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680354    4799 flags.go:64] FLAG: --application-metrics-count-limit="100"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680366    4799 flags.go:64] FLAG: --authentication-token-webhook="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680373    4799 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680381    4799 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680395    4799 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680403    4799 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680409    4799 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680416    4799 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680421    4799 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680428    4799 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680433    4799 flags.go:64] FLAG: --cgroup-root=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680438    4799 flags.go:64] FLAG: --cgroups-per-qos="true"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680443    4799 flags.go:64] FLAG: --client-ca-file=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680528    4799 flags.go:64] FLAG: --cloud-config=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680665    4799 flags.go:64] FLAG: --cloud-provider=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680701    4799 flags.go:64] FLAG: --cluster-dns="[]"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680716    4799 flags.go:64] FLAG: --cluster-domain=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680722    4799 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680728    4799 flags.go:64] FLAG: --config-dir=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680733    4799 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680742    4799 flags.go:64] FLAG: --container-log-max-files="5"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680776    4799 flags.go:64] FLAG: --container-log-max-size="10Mi"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680782    4799 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680787    4799 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680793    4799 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680799    4799 flags.go:64] FLAG: --contention-profiling="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680806    4799 flags.go:64] FLAG: --cpu-cfs-quota="true"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680813    4799 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680820    4799 flags.go:64] FLAG: --cpu-manager-policy="none"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680825    4799 flags.go:64] FLAG: --cpu-manager-policy-options=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680834    4799 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680840    4799 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680846    4799 flags.go:64] FLAG: --enable-debugging-handlers="true"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680851    4799 flags.go:64] FLAG: --enable-load-reader="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680938    4799 flags.go:64] FLAG: --enable-server="true"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680943    4799 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680954    4799 flags.go:64] FLAG: --event-burst="100"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680960    4799 flags.go:64] FLAG: --event-qps="50"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680966    4799 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680987    4799 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.680992    4799 flags.go:64] FLAG: --eviction-hard=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681015    4799 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681021    4799 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681026    4799 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681032    4799 flags.go:64] FLAG: --eviction-soft=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681040    4799 flags.go:64] FLAG: --eviction-soft-grace-period=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681047    4799 flags.go:64] FLAG: --exit-on-lock-contention="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681052    4799 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681057    4799 flags.go:64] FLAG: --experimental-mounter-path=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681063    4799 flags.go:64] FLAG: --fail-cgroupv1="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681069    4799 flags.go:64] FLAG: --fail-swap-on="true"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681074    4799 flags.go:64] FLAG: --feature-gates=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681081    4799 flags.go:64] FLAG: --file-check-frequency="20s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681086    4799 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681091    4799 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681097    4799 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681104    4799 flags.go:64] FLAG: --healthz-port="10248"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681109    4799 flags.go:64] FLAG: --help="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681114    4799 flags.go:64] FLAG: --hostname-override=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681119    4799 flags.go:64] FLAG: --housekeeping-interval="10s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681145    4799 flags.go:64] FLAG: --http-check-frequency="20s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681150    4799 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681156    4799 flags.go:64] FLAG: --image-credential-provider-config=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681161    4799 flags.go:64] FLAG: --image-gc-high-threshold="85"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681166    4799 flags.go:64] FLAG: --image-gc-low-threshold="80"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681171    4799 flags.go:64] FLAG: --image-service-endpoint=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681176    4799 flags.go:64] FLAG: --kernel-memcg-notification="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681181    4799 flags.go:64] FLAG: --kube-api-burst="100"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681186    4799 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681191    4799 flags.go:64] FLAG: --kube-api-qps="50"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681196    4799 flags.go:64] FLAG: --kube-reserved=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681201    4799 flags.go:64] FLAG: --kube-reserved-cgroup=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681206    4799 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681211    4799 flags.go:64] FLAG: --kubelet-cgroups=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681217    4799 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681222    4799 flags.go:64] FLAG: --lock-file=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681228    4799 flags.go:64] FLAG: --log-cadvisor-usage="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681234    4799 flags.go:64] FLAG: --log-flush-frequency="5s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681240    4799 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681251    4799 flags.go:64] FLAG: --log-json-split-stream="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681257    4799 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681267    4799 flags.go:64] FLAG: --log-text-split-stream="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681272    4799 flags.go:64] FLAG: --logging-format="text"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681278    4799 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681284    4799 flags.go:64] FLAG: --make-iptables-util-chains="true"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681289    4799 flags.go:64] FLAG: --manifest-url=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681295    4799 flags.go:64] FLAG: --manifest-url-header=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681306    4799 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681311    4799 flags.go:64] FLAG: --max-open-files="1000000"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681319    4799 flags.go:64] FLAG: --max-pods="110"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681324    4799 flags.go:64] FLAG: --maximum-dead-containers="-1"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681330    4799 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681335    4799 flags.go:64] FLAG: --memory-manager-policy="None"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681340    4799 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681347    4799 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681351    4799 flags.go:64] FLAG: --node-ip="192.168.126.11"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681357    4799 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681376    4799 flags.go:64] FLAG: --node-status-max-images="50"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681381    4799 flags.go:64] FLAG: --node-status-update-frequency="10s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681386    4799 flags.go:64] FLAG: --oom-score-adj="-999"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681391    4799 flags.go:64] FLAG: --pod-cidr=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681395    4799 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681403    4799 flags.go:64] FLAG: --pod-manifest-path=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681407    4799 flags.go:64] FLAG: --pod-max-pids="-1"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681411    4799 flags.go:64] FLAG: --pods-per-core="0"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681416    4799 flags.go:64] FLAG: --port="10250"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681421    4799 flags.go:64] FLAG: --protect-kernel-defaults="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681426    4799 flags.go:64] FLAG: --provider-id=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681431    4799 flags.go:64] FLAG: --qos-reserved=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681435    4799 flags.go:64] FLAG: --read-only-port="10255"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681439    4799 flags.go:64] FLAG: --register-node="true"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681444    4799 flags.go:64] FLAG: --register-schedulable="true"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681448    4799 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681458    4799 flags.go:64] FLAG: --registry-burst="10"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681464    4799 flags.go:64] FLAG: --registry-qps="5"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681469    4799 flags.go:64] FLAG: --reserved-cpus=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681473    4799 flags.go:64] FLAG: --reserved-memory=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681479    4799 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681483    4799 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681488    4799 flags.go:64] FLAG: --rotate-certificates="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681492    4799 flags.go:64] FLAG: --rotate-server-certificates="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681496    4799 flags.go:64] FLAG: --runonce="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681501    4799 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681505    4799 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681510    4799 flags.go:64] FLAG: --seccomp-default="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681514    4799 flags.go:64] FLAG: --serialize-image-pulls="true"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681518    4799 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681523    4799 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681527    4799 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681532    4799 flags.go:64] FLAG: --storage-driver-password="root"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681536    4799 flags.go:64] FLAG: --storage-driver-secure="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681541    4799 flags.go:64] FLAG: --storage-driver-table="stats"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681546    4799 flags.go:64] FLAG: --storage-driver-user="root"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681550    4799 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681555    4799 flags.go:64] FLAG: --sync-frequency="1m0s"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681560    4799 flags.go:64] FLAG: --system-cgroups=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681564    4799 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681572    4799 flags.go:64] FLAG: --system-reserved-cgroup=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681577    4799 flags.go:64] FLAG: --tls-cert-file=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681581    4799 flags.go:64] FLAG: --tls-cipher-suites="[]"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681591    4799 flags.go:64] FLAG: --tls-min-version=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681595    4799 flags.go:64] FLAG: --tls-private-key-file=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681599    4799 flags.go:64] FLAG: --topology-manager-policy="none"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681603    4799 flags.go:64] FLAG: --topology-manager-policy-options=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681608    4799 flags.go:64] FLAG: --topology-manager-scope="container"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681612    4799 flags.go:64] FLAG: --v="2"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681640    4799 flags.go:64] FLAG: --version="false"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681648    4799 flags.go:64] FLAG: --vmodule=""
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681653    4799 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.681658    4799 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681808    4799 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681814    4799 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681820    4799 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681826    4799 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681830    4799 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681834    4799 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681838    4799 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681842    4799 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681846    4799 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681850    4799 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681855    4799 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681860    4799 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681864    4799 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681868    4799 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681873    4799 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681877    4799 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681882    4799 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681885    4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681891    4799 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681895    4799 feature_gate.go:330] unrecognized feature gate: NewOLM
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681900    4799 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681905    4799 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681908    4799 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681912    4799 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681916    4799 feature_gate.go:330] unrecognized feature gate: SignatureStores
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681920    4799 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681924    4799 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681928    4799 feature_gate.go:330] unrecognized feature gate: PinnedImages
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681945    4799 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681949    4799 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681953    4799 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681956    4799 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681960    4799 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681965    4799 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681969    4799 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681972    4799 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681976    4799 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681983    4799 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681986    4799 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681990    4799 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681994    4799 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.681997    4799 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682001    4799 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682004    4799 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682008    4799 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682011    4799 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682015    4799 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682019    4799 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682022    4799 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682026    4799 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682029    4799 feature_gate.go:330] unrecognized feature gate: Example
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682033    4799 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682037    4799 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682041    4799 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682045    4799 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682049    4799 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682052    4799 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682055    4799 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682059    4799 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682063    4799 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682069    4799 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682073    4799 feature_gate.go:330] unrecognized feature gate: OVNObservability
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682077    4799 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682081    4799 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682084    4799 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682088    4799 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682092    4799 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682095    4799 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682099    4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682103    4799 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.682107    4799 feature_gate.go:330] unrecognized
feature gate: MultiArchInstallAWS
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.682137 4799 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.689107 4799 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.689157 4799 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689232 4799 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689242 4799 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689249 4799 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689255 4799 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689259 4799 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689264 4799 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689270 4799 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689275 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689281 4799 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
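
The feature_gate.go:386 entry above is the resolved gate map once the names kubelet does not know (the OpenShift-side gates warned about throughout this log) have been dropped. A small Go sketch for scraping that summary back into a map, assuming only that the logged {map[Name:bool ...]} format stays stable; this is a log-parsing convenience, not an API kubelet exposes.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseGateSummary converts the "{map[Name:bool ...]}" string that
// feature_gate.go:386 logs into a map[string]bool.
func parseGateSummary(s string) (map[string]bool, error) {
	s = strings.TrimPrefix(strings.TrimSpace(s), "{map[")
	s = strings.TrimSuffix(s, "]}")
	gates := make(map[string]bool)
	for _, pair := range strings.Fields(s) {
		name, val, ok := strings.Cut(pair, ":")
		if !ok {
			return nil, fmt.Errorf("malformed pair %q", pair)
		}
		b, err := strconv.ParseBool(val)
		if err != nil {
			return nil, fmt.Errorf("gate %s: %w", name, err)
		}
		gates[name] = b
	}
	return gates, nil
}

func main() {
	// Trimmed excerpt of the summary logged above.
	gates, err := parseGateSummary("{map[CloudDualStackNodeIPs:true KMSv1:true NodeSwap:false]}")
	if err != nil {
		panic(err)
	}
	fmt.Println(gates["NodeSwap"]) // false
}
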
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689287 4799 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689293 4799 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689298 4799 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689304 4799 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689310 4799 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689317 4799 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689322 4799 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689327 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689333 4799 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689337 4799 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689342 4799 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689347 4799 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689352 4799 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689357 4799 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689361 4799 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689366 4799 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689370 4799 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689375 4799 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689380 4799 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689384 4799 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689389 4799 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689393 4799 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689397 4799 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689402 4799 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689406 4799 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689414 4799 feature_gate.go:330] unrecognized feature gate: 
MachineAPIMigration Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689419 4799 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689423 4799 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689428 4799 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689433 4799 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689438 4799 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689442 4799 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689448 4799 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689453 4799 feature_gate.go:330] unrecognized feature gate: Example Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689458 4799 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689464 4799 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689472 4799 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689479 4799 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689484 4799 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689488 4799 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689494 4799 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689498 4799 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689503 4799 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689508 4799 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689513 4799 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689517 4799 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689522 4799 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689526 4799 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689531 4799 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689535 4799 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689539 4799 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 21 
17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689544 4799 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689548 4799 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689553 4799 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689557 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689562 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689566 4799 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689571 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689575 4799 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689580 4799 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689584 4799 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689590 4799 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.689598 4799 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689779 4799 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689787 4799 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689792 4799 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689796 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689800 4799 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689804 4799 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689809 4799 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689813 4799 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689819 4799 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689823 4799 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689827 4799 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689831 4799 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689835 4799 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689839 4799 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689843 4799 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689847 4799 feature_gate.go:330] unrecognized feature gate: Example Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689851 4799 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689854 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689858 4799 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689862 4799 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689865 4799 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689869 4799 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689872 4799 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689876 4799 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689879 4799 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689883 4799 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689887 4799 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689890 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689894 4799 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689898 4799 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689901 4799 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689907 4799 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689912 4799 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689916 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689921 4799 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689926 4799 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689930 4799 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689934 4799 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689941 4799 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689946 4799 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689951 4799 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689955 4799 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689959 4799 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689963 4799 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689967 4799 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689971 4799 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689975 4799 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689979 4799 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689983 4799 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689987 4799 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689991 4799 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689995 4799 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.689999 4799 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690002 4799 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690006 4799 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690010 4799 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690013 4799 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 
17:32:53.690018 4799 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690023 4799 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690027 4799 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690031 4799 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690035 4799 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690038 4799 feature_gate.go:330] unrecognized feature gate: PinnedImages
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690043 4799 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690047 4799 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690051 4799 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690054 4799 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690059 4799 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690063 4799 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690067 4799 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Jan 21 17:32:53 crc kubenswrapper[4799]: W0121 17:32:53.690072 4799 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.690080 4799 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.694156 4799 server.go:940] "Client rotation is on, will bootstrap in background"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.697314 4799 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.697411 4799 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
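
The client-rotation entries here, together with the expiration/deadline pair logged just below, show kubelet planning rotation well before the certificate expires (deadline 2026-01-07 vs. expiration 2026-02-24). A sketch of that computation, assuming client-go's certificate manager behavior of choosing a uniformly random point in roughly the 70-90% band of the certificate's lifetime; the notBefore value below is an assumption, since the log only prints the expiration.

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a rotation time at a random point in the
// 70-90% band of the certificate lifetime (assumed client-go style
// jitter), so rotation is attempted well before actual expiry.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := float64(notAfter.Sub(notBefore))
	jittered := time.Duration(total * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	// notAfter comes from the log; notBefore is a hypothetical
	// one-year-earlier issue time, used only for illustration.
	notBefore := time.Date(2025, 2, 24, 5, 52, 8, 0, time.UTC)
	notAfter := time.Date(2026, 2, 24, 5, 52, 8, 0, time.UTC)
	fmt.Println("rotate at:", rotationDeadline(notBefore, notAfter))
}
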
Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.698276 4799 server.go:997] "Starting client certificate rotation" Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.698320 4799 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.698499 4799 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-07 13:49:32.407595996 +0000 UTC Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.698553 4799 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.911420 4799 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 21 17:32:53 crc kubenswrapper[4799]: E0121 17:32:53.913597 4799 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.954765 4799 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 21 17:32:53 crc kubenswrapper[4799]: I0121 17:32:53.972772 4799 log.go:25] "Validated CRI v1 runtime API" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.002166 4799 log.go:25] "Validated CRI v1 image API" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.003572 4799 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.006998 4799 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-21-17-27-46-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.007045 4799 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0}] Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.025023 4799 manager.go:217] Machine: {Timestamp:2026-01-21 17:32:54.023572488 +0000 UTC m=+0.649862531 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10 BootID:933b271b-0519-400b-9412-6730db28e758 Filesystems:[{Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 
Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:bd:d1:2d Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:bd:d1:2d Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:a3:2b:0c Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:e7:5d:21 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:b6:95:10 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:58:8e:66 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:6e:68:71:e6:51:2b Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:52:c5:b6:4a:02:1a Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 
Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.025292 4799 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.025551 4799 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.026323 4799 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.026543 4799 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.026595 4799 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.026891 4799 topology_manager.go:138] "Creating topology manager with none policy" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.026902 4799 
container_manager_linux.go:303] "Creating device plugin manager" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.027113 4799 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.027175 4799 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.027565 4799 state_mem.go:36] "Initialized new in-memory state store" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.027664 4799 server.go:1245] "Using root directory" path="/var/lib/kubelet" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.029091 4799 kubelet.go:418] "Attempting to sync node with API server" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.029117 4799 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.029281 4799 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.029301 4799 kubelet.go:324] "Adding apiserver pod source" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.029318 4799 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.039304 4799 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.039887 4799 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Jan 21 17:32:54 crc kubenswrapper[4799]: W0121 17:32:54.040270 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Jan 21 17:32:54 crc kubenswrapper[4799]: W0121 17:32:54.040306 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Jan 21 17:32:54 crc kubenswrapper[4799]: E0121 17:32:54.040422 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:32:54 crc kubenswrapper[4799]: E0121 17:32:54.040361 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.040726 4799 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.041364 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Jan 21 17:32:54 crc 
kubenswrapper[4799]: I0121 17:32:54.041386 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.041394 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.041401 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.041418 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.041427 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.041435 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.041447 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.041457 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.041464 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.041477 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.041488 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.041776 4799 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.042312 4799 server.go:1280] "Started kubelet"
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.042660 4799 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Jan 21 17:32:54 crc systemd[1]: Started Kubernetes Kubelet.
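
With the volume plugins registered and "Started kubelet" logged, the eviction settings for this node come from the nodeConfig JSON printed earlier (container_manager_linux.go:272). A sketch that decodes a trimmed excerpt of that JSON to make the HardEvictionThresholds structure explicit; the structs below mirror only the fields visible in the log, while the real kubelet types (pkg/kubelet/eviction) are richer.

package main

import (
	"encoding/json"
	"fmt"
)

// threshold mirrors the eviction entries visible in the log: a
// quantity-based signal like memory.available carries Quantity,
// a percentage-based one like nodefs.available carries Percentage.
type threshold struct {
	Signal   string `json:"Signal"`
	Operator string `json:"Operator"`
	Value    struct {
		Quantity   *string `json:"Quantity"`
		Percentage float64 `json:"Percentage"`
	} `json:"Value"`
	GracePeriod int64 `json:"GracePeriod"`
}

type nodeConfig struct {
	SystemReserved         map[string]string `json:"SystemReserved"`
	HardEvictionThresholds []threshold       `json:"HardEvictionThresholds"`
}

func main() {
	// Trimmed excerpt of the JSON kubelet logged above.
	raw := `{"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},
	"HardEvictionThresholds":[
	 {"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0},
	 {"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0}]}`
	var cfg nodeConfig
	if err := json.Unmarshal([]byte(raw), &cfg); err != nil {
		panic(err)
	}
	for _, t := range cfg.HardEvictionThresholds {
		q := "<nil>"
		if t.Value.Quantity != nil {
			q = *t.Value.Quantity
		}
		fmt.Printf("%s %s quantity=%s percentage=%v\n", t.Signal, t.Operator, q, t.Value.Percentage)
	}
}
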
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.042667 4799 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.045481 4799 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.054524 4799 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.057355 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.057624 4799 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.058726 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 12:59:42.795038397 +0000 UTC Jan 21 17:32:54 crc kubenswrapper[4799]: E0121 17:32:54.058827 4799 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 21 17:32:54 crc kubenswrapper[4799]: E0121 17:32:54.057316 4799 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.177:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188ccf62227f4f78 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-21 17:32:54.042275704 +0000 UTC m=+0.668565727,LastTimestamp:2026-01-21 17:32:54.042275704 +0000 UTC m=+0.668565727,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.058934 4799 volume_manager.go:287] "The desired_state_of_world populator starts" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.058952 4799 volume_manager.go:289] "Starting Kubelet Volume Manager" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.059333 4799 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.059847 4799 server.go:460] "Adding debug handlers to kubelet server" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.059975 4799 factory.go:55] Registering systemd factory Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.060006 4799 factory.go:221] Registration of the systemd container factory successfully Jan 21 17:32:54 crc kubenswrapper[4799]: W0121 17:32:54.059668 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Jan 21 17:32:54 crc kubenswrapper[4799]: E0121 17:32:54.060093 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get 
\"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.060359 4799 factory.go:153] Registering CRI-O factory Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.060379 4799 factory.go:221] Registration of the crio container factory successfully Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.060449 4799 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.060476 4799 factory.go:103] Registering Raw factory Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.060492 4799 manager.go:1196] Started watching for new ooms in manager Jan 21 17:32:54 crc kubenswrapper[4799]: E0121 17:32:54.060892 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" interval="200ms" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.061332 4799 manager.go:319] Starting recovery of all containers Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070477 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070536 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070548 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070557 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070566 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070574 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070582 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" 
volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070591 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070601 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070610 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070618 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070627 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070640 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070651 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070659 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070667 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070677 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070689 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" 
volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070699 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070711 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070724 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070735 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070744 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070753 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070762 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070773 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070787 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070797 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070807 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070817 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070847 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070856 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070866 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070890 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070899 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070908 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070917 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070926 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070934 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070944 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" 
volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070954 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070981 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.070991 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071000 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071009 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071022 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071033 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071046 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071059 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071074 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071086 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071100 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071118 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071149 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071164 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071180 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071193 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071207 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071218 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071231 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071241 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071253 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071264 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071281 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071294 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071306 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071318 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071331 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071342 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071355 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071366 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071379 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071390 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071401 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071414 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071427 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071438 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071449 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071460 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071471 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071486 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071500 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071511 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071520 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" 
volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071531 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071542 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071551 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071563 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071576 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071588 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071600 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071610 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071621 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071632 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071643 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071658 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071668 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071682 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071695 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071705 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071715 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071725 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071736 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071761 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071788 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071800 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071811 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071823 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071834 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071845 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071857 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071869 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071882 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071896 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071908 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071940 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071952 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071965 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071978 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.071993 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.072005 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.072017 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.072029 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.072042 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.072056 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.072067 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.072079 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.072094 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.072157 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.072172 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.072186 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.072200 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.072212 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073394 4799 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073466 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073485 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073498 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073512 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073524 4799 reconstruct.go:130] "Volume is 
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073524 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073533 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073544 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073555 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073570 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073586 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073601 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073614 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073625 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073635 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073644 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073660 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073673 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073686 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073697 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073707 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073716 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073726 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073750 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073765 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073794 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073802 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073814 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073825 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073835 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073846 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073859 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073875 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073893 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073903 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073912 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073923 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073936 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073945 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073958 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073968 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073978 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073988 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.073998 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074011 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074020 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074032 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074043 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074057 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074069 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074082 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074096 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074108 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074120 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074179 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074192 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074205 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074253 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074273 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074285 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074301 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074313 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074323 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074333 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074343 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074355 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074364 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074374 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074383 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074393 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074402 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074415 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074426 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074437 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074448 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074457 4799 reconstruct.go:97] "Volume reconstruction finished"
volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074426 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074437 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074448 4799 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074457 4799 reconstruct.go:97] "Volume reconstruction finished" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.074465 4799 reconciler.go:26] "Reconciler: start to sync state" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.079064 4799 manager.go:324] Recovery completed Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.093394 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:54 crc kubenswrapper[4799]: E0121 17:32:54.160002 4799 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.198235 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.198321 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.198332 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.200971 4799 cpu_manager.go:225] "Starting CPU manager" policy="none" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.201002 4799 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.201050 4799 state_mem.go:36] "Initialized new in-memory state store" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.201722 4799 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.203709 4799 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.203771 4799 status_manager.go:217] "Starting to sync pod status with apiserver" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.203825 4799 kubelet.go:2335] "Starting kubelet main sync loop" Jan 21 17:32:54 crc kubenswrapper[4799]: E0121 17:32:54.203884 4799 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jan 21 17:32:54 crc kubenswrapper[4799]: W0121 17:32:54.205479 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Jan 21 17:32:54 crc kubenswrapper[4799]: E0121 17:32:54.205594 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.238709 4799 policy_none.go:49] "None policy: Start" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.240173 4799 memory_manager.go:170] "Starting memorymanager" policy="None" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.240238 4799 state_mem.go:35] "Initializing new in-memory state store" Jan 21 17:32:54 crc kubenswrapper[4799]: E0121 17:32:54.260342 4799 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 21 17:32:54 crc kubenswrapper[4799]: E0121 17:32:54.262612 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" interval="400ms" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.294997 4799 manager.go:334] "Starting Device Plugin manager" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.295111 4799 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.295155 4799 server.go:79] "Starting device plugin registration server" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.295672 4799 eviction_manager.go:189] "Eviction manager: starting control loop" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.295694 4799 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.296190 4799 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.296273 4799 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.296280 4799 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jan 21 17:32:54 crc kubenswrapper[4799]: E0121 17:32:54.303210 4799 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.304388 4799 kubelet.go:2421] 
"SyncLoop ADD" source="file" pods=["openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.304540 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.305480 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.305512 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.305522 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.305693 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.306138 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.306183 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.306569 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.306602 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.306610 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.306718 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.306901 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.306942 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.306954 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.307155 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.307203 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.307477 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.307515 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.307527 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.307714 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.307730 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.307747 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.307756 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.307819 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.307852 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.308681 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.308716 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.308729 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.308830 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.308964 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.309008 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.309382 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.309403 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.309413 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.309583 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.309600 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.309610 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.309705 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.309732 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.309743 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.309905 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.309933 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.310562 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.310614 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.310624 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.378968 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.379017 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.379041 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.379059 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.379078 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.379091 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.379106 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.379120 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.379153 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.379167 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.379184 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.379200 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.379216 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.379229 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.379245 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.395912 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.401181 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.401236 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 
17:32:54.401248 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.401283 4799 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 21 17:32:54 crc kubenswrapper[4799]: E0121 17:32:54.402090 4799 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.177:6443: connect: connection refused" node="crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.481046 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.481156 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.481190 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.481227 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.481255 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.481275 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.481290 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.481304 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.481318 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.481339 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.481357 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.481387 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.481403 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.481417 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.481432 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.482017 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.482062 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.482079 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:32:54 crc 
kubenswrapper[4799]: I0121 17:32:54.482110 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.482110 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.482150 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.482181 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.482169 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.482211 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.482214 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.482238 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.482237 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.482259 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.482265 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.482292 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.602231 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.603931 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.603982 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.604020 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.604078 4799 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 21 17:32:54 crc kubenswrapper[4799]: E0121 17:32:54.604847 4799 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.177:6443: connect: connection refused" node="crc" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.658227 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: E0121 17:32:54.663829 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" interval="800ms" Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.665980 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: W0121 17:32:54.683855 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-dee34ca084c651dd248209714b10080a6b2a373153e1bdf63144a7aba699482f WatchSource:0}: Error finding container dee34ca084c651dd248209714b10080a6b2a373153e1bdf63144a7aba699482f: Status 404 returned error can't find the container with id dee34ca084c651dd248209714b10080a6b2a373153e1bdf63144a7aba699482f Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.685468 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: W0121 17:32:54.688200 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-6dcc3f2bee55c8e895d7c773d693fb0f6acd785c43c6fa715d30bbe599d53b06 WatchSource:0}: Error finding container 6dcc3f2bee55c8e895d7c773d693fb0f6acd785c43c6fa715d30bbe599d53b06: Status 404 returned error can't find the container with id 6dcc3f2bee55c8e895d7c773d693fb0f6acd785c43c6fa715d30bbe599d53b06 Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.692745 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: W0121 17:32:54.696031 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-0e56ceeec99e8c572c45f45247493bb4a066fdcdde90994293bf20e71143504e WatchSource:0}: Error finding container 0e56ceeec99e8c572c45f45247493bb4a066fdcdde90994293bf20e71143504e: Status 404 returned error can't find the container with id 0e56ceeec99e8c572c45f45247493bb4a066fdcdde90994293bf20e71143504e Jan 21 17:32:54 crc kubenswrapper[4799]: I0121 17:32:54.696664 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 21 17:32:54 crc kubenswrapper[4799]: W0121 17:32:54.709878 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-56ecba884d8bdc5e9e2beab8995a7814287f412197fe51d66358dba9e7ee33e6 WatchSource:0}: Error finding container 56ecba884d8bdc5e9e2beab8995a7814287f412197fe51d66358dba9e7ee33e6: Status 404 returned error can't find the container with id 56ecba884d8bdc5e9e2beab8995a7814287f412197fe51d66358dba9e7ee33e6 Jan 21 17:32:54 crc kubenswrapper[4799]: W0121 17:32:54.720795 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-2681fd3abbc95eac31df34cfee3e1fd6c3d272463dcbe9835273854b91fc975f WatchSource:0}: Error finding container 2681fd3abbc95eac31df34cfee3e1fd6c3d272463dcbe9835273854b91fc975f: Status 404 returned error can't find the container with id 2681fd3abbc95eac31df34cfee3e1fd6c3d272463dcbe9835273854b91fc975f Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.005266 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.085854 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 15:07:38.463712968 +0000 UTC Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.086293 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.086330 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.086341 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.086376 4799 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 21 17:32:55 crc kubenswrapper[4799]: W0121 17:32:55.086572 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Jan 21 17:32:55 crc kubenswrapper[4799]: E0121 17:32:55.086673 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.086611 4799 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Jan 21 17:32:55 crc kubenswrapper[4799]: E0121 17:32:55.086895 4799 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.177:6443: connect: connection refused" node="crc" Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.208039 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"56ecba884d8bdc5e9e2beab8995a7814287f412197fe51d66358dba9e7ee33e6"} Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.271031 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0e56ceeec99e8c572c45f45247493bb4a066fdcdde90994293bf20e71143504e"} Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.272990 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6dcc3f2bee55c8e895d7c773d693fb0f6acd785c43c6fa715d30bbe599d53b06"} Jan 21 17:32:55 crc kubenswrapper[4799]: W0121 17:32:55.273367 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Jan 21 17:32:55 crc kubenswrapper[4799]: E0121 17:32:55.273553 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.276041 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"dee34ca084c651dd248209714b10080a6b2a373153e1bdf63144a7aba699482f"} Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.278874 4799 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"2681fd3abbc95eac31df34cfee3e1fd6c3d272463dcbe9835273854b91fc975f"} Jan 21 17:32:55 crc kubenswrapper[4799]: E0121 17:32:55.465737 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" interval="1.6s" Jan 21 17:32:55 crc kubenswrapper[4799]: W0121 17:32:55.480311 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Jan 21 17:32:55 crc kubenswrapper[4799]: E0121 17:32:55.480404 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:32:55 crc kubenswrapper[4799]: W0121 17:32:55.503690 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Jan 21 17:32:55 crc kubenswrapper[4799]: E0121 17:32:55.503846 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.887939 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.890580 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.890660 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.890715 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:55 crc kubenswrapper[4799]: I0121 17:32:55.890752 4799 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 21 17:32:55 crc kubenswrapper[4799]: E0121 17:32:55.891696 4799 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.177:6443: connect: connection refused" node="crc" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.003435 4799 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 21 17:32:56 crc kubenswrapper[4799]: E0121 17:32:56.004533 4799 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate 
signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.056007 4799 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.086826 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 04:34:58.523587637 +0000 UTC Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.282926 4799 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="fe229c5bd53b1207c5b1dd29eba6ffa1ebcb82e00a7d07de733e375527f7d2af" exitCode=0 Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.283044 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.283498 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"fe229c5bd53b1207c5b1dd29eba6ffa1ebcb82e00a7d07de733e375527f7d2af"} Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.366579 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.366637 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.366647 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.367915 4799 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8" exitCode=0 Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.367998 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8"} Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.368087 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.369263 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.369293 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.369327 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.370564 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82"} Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.370657 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194"} Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.372297 4799 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056" exitCode=0 Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.372335 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056"} Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.372430 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.373345 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.373386 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.373398 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.374812 4799 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="64e8d6ff18f53190b7b6a2b39949496833ba1d6551612c97959d4e55e86abdb0" exitCode=0 Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.374838 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.374962 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.374853 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"64e8d6ff18f53190b7b6a2b39949496833ba1d6551612c97959d4e55e86abdb0"} Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.375763 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.375796 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.375809 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.376241 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.376284 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:56 crc kubenswrapper[4799]: I0121 17:32:56.376302 4799 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:56 crc kubenswrapper[4799]: W0121 17:32:56.895195 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Jan 21 17:32:56 crc kubenswrapper[4799]: E0121 17:32:56.895417 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.055858 4799 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Jan 21 17:32:57 crc kubenswrapper[4799]: E0121 17:32:57.066929 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" interval="3.2s" Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.087996 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 09:52:58.964502008 +0000 UTC Jan 21 17:32:57 crc kubenswrapper[4799]: W0121 17:32:57.246881 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Jan 21 17:32:57 crc kubenswrapper[4799]: E0121 17:32:57.247055 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.380541 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"9bc3cd5709df489fdb1fe0890f905c648166df1a65093ecd00f15052c59e64e7"} Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.380719 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.381951 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.381989 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.382001 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.385019 4799 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"43e360aa041ebc4c926c01dc20eb1920c9e121b239f76c6f312d4928019f38c4"} Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.385064 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"5076079801d58a08432af79949fb03da7b9445180a950bdf1310638edfa8b95d"} Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.387689 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5"} Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.387723 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea"} Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.387828 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.388822 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.388857 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.388868 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.391023 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d"} Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.392787 4799 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="85585b0d402e98e6f91410bef0666ba47bc3b9138153aeda2023decfa1c8a641" exitCode=0 Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.392819 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"85585b0d402e98e6f91410bef0666ba47bc3b9138153aeda2023decfa1c8a641"} Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.392905 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.393586 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.393609 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.393619 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.494212 4799 kubelet_node_status.go:401] "Setting node annotation to 
enable volume controller attach/detach"
Jan 21 17:32:57 crc kubenswrapper[4799]: W0121 17:32:57.495701 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused
Jan 21 17:32:57 crc kubenswrapper[4799]: E0121 17:32:57.495803 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError"
Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.496045 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.496102 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.496119 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:32:57 crc kubenswrapper[4799]: I0121 17:32:57.496187 4799 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 21 17:32:57 crc kubenswrapper[4799]: E0121 17:32:57.496622 4799 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.177:6443: connect: connection refused" node="crc"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.056021 4799 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.484509 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.484642 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 18:19:44.426558031 +0000 UTC
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.488810 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616"}
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.510527 4799 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="9f3f8f40740daa57712389592e60faf5fa8e5433c827fe0fa540e4f19f75244d" exitCode=0
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.510588 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"9f3f8f40740daa57712389592e60faf5fa8e5433c827fe0fa540e4f19f75244d"}
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.510749 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.512806 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.512846 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.512869 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.513141 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.513656 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.513905 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"38ff097f5dbab0def600436177a09f212347b705be6b75949d9646a79ab4e2c5"}
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.514011 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.514261 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.514281 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.514291 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.516344 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.516452 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.516477 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.519746 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.519830 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:32:58 crc kubenswrapper[4799]: I0121 17:32:58.519852 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:32:58 crc kubenswrapper[4799]: W0121 17:32:58.522589 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused
Jan 21 17:32:58 crc kubenswrapper[4799]: E0121 17:32:58.522718 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError"
Jan 21 17:32:59 crc kubenswrapper[4799]: I0121 17:32:59.156556 4799 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused
Jan 21 17:32:59 crc kubenswrapper[4799]: I0121 17:32:59.485098 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 13:17:24.386169393 +0000 UTC
Jan 21 17:32:59 crc kubenswrapper[4799]: I0121 17:32:59.574492 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194"}
Jan 21 17:32:59 crc kubenswrapper[4799]: I0121 17:32:59.574552 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05"}
Jan 21 17:32:59 crc kubenswrapper[4799]: I0121 17:32:59.577716 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c075cb965f6955fc583d6acbfd34a5746a92b6f2f30f1ddfdbc59b271050ad7c"}
Jan 21 17:32:59 crc kubenswrapper[4799]: I0121 17:32:59.577804 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:32:59 crc kubenswrapper[4799]: I0121 17:32:59.577840 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:32:59 crc kubenswrapper[4799]: I0121 17:32:59.577969 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 21 17:32:59 crc kubenswrapper[4799]: I0121 17:32:59.579474 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:32:59 crc kubenswrapper[4799]: I0121 17:32:59.579498 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:32:59 crc kubenswrapper[4799]: I0121 17:32:59.579557 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:32:59 crc kubenswrapper[4799]: I0121 17:32:59.579572 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:32:59 crc kubenswrapper[4799]: I0121 17:32:59.579519 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:32:59 crc kubenswrapper[4799]: I0121 17:32:59.579620 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.062221 4799 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused
Jan 21 17:33:00 crc kubenswrapper[4799]: E0121 17:33:00.062204 4799 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.177:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188ccf62227f4f78 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-21 17:32:54.042275704 +0000 UTC m=+0.668565727,LastTimestamp:2026-01-21 17:32:54.042275704 +0000 UTC m=+0.668565727,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Jan 21 17:33:00 crc kubenswrapper[4799]: E0121 17:33:00.270868 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" interval="6.4s"
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.428860 4799 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Jan 21 17:33:00 crc kubenswrapper[4799]: E0121 17:33:00.430464 4799 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError"
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.507744 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 16:19:08.252093473 +0000 UTC
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.584221 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"63a941e69045d509e468eea5b2787e1e51e73c6b546a63214914823916f88c08"}
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.584726 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.585754 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.585888 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.586012 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.587415 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c90a67efcfae35a96cdfc2a14d3150b8872e5e46ff28468d5d1910d34041b6dc"}
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.587628 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.588516 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.588668 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.588754 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.696995 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.706751 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.706952 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.707034 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:00 crc kubenswrapper[4799]: I0121 17:33:00.707178 4799 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 21 17:33:00 crc kubenswrapper[4799]: E0121 17:33:00.707940 4799 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.177:6443: connect: connection refused" node="crc"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.059906 4799 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.332889 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.333098 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.334411 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.334442 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.334459 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.521816 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 07:24:28.095863387 +0000 UTC
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.590764 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.591607 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.686593 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.695922 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.695989 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.696053 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.768079 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.768311 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.769715 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.769754 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.769765 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:01 crc kubenswrapper[4799]: I0121 17:33:01.777459 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 21 17:33:02 crc kubenswrapper[4799]: I0121 17:33:02.522282 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 05:36:33.693886158 +0000 UTC
Jan 21 17:33:02 crc kubenswrapper[4799]: I0121 17:33:02.605873 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"98274f97e4084169a2703e4c4eba552ea91868bfde19278d9365ce50d7a8cff2"}
Jan 21 17:33:02 crc kubenswrapper[4799]: I0121 17:33:02.605941 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"356e8e169444fda17290b5c8c7b6f741b075a104e1a7a247411bbcfb20b0ef4f"}
Jan 21 17:33:02 crc kubenswrapper[4799]: I0121 17:33:02.606035 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:02 crc kubenswrapper[4799]: I0121 17:33:02.606058 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:02 crc kubenswrapper[4799]: I0121 17:33:02.607482 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:02 crc kubenswrapper[4799]: I0121 17:33:02.607533 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:02 crc kubenswrapper[4799]: I0121 17:33:02.607551 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:02 crc kubenswrapper[4799]: I0121 17:33:02.607837 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:02 crc kubenswrapper[4799]: I0121 17:33:02.607883 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:02 crc kubenswrapper[4799]: I0121 17:33:02.607901 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:03 crc kubenswrapper[4799]: I0121 17:33:03.523383 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 11:30:52.630569574 +0000 UTC
Jan 21 17:33:03 crc kubenswrapper[4799]: I0121 17:33:03.617569 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d36b3ce4ad807a78bd8e7ffaa8e43beabdecde4f1e8b537d5b8e55c9cf05f67b"}
Jan 21 17:33:03 crc kubenswrapper[4799]: I0121 17:33:03.618055 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:03 crc kubenswrapper[4799]: I0121 17:33:03.619437 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:03 crc kubenswrapper[4799]: I0121 17:33:03.619465 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:03 crc kubenswrapper[4799]: I0121 17:33:03.619480 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:03 crc kubenswrapper[4799]: I0121 17:33:03.759814 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:33:03 crc kubenswrapper[4799]: I0121 17:33:03.760382 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:03 crc kubenswrapper[4799]: I0121 17:33:03.762431 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:03 crc kubenswrapper[4799]: I0121 17:33:03.762495 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:03 crc kubenswrapper[4799]: I0121 17:33:03.762506 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:04 crc kubenswrapper[4799]: E0121 17:33:04.303430 4799 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Jan 21 17:33:04 crc kubenswrapper[4799]: I0121 17:33:04.524554 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 09:07:41.946749889 +0000 UTC
Jan 21 17:33:04 crc kubenswrapper[4799]: I0121 17:33:04.621166 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:04 crc kubenswrapper[4799]: I0121 17:33:04.622644 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:04 crc kubenswrapper[4799]: I0121 17:33:04.622697 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:04 crc kubenswrapper[4799]: I0121 17:33:04.622710 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:05 crc kubenswrapper[4799]: I0121 17:33:05.276626 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Jan 21 17:33:05 crc kubenswrapper[4799]: I0121 17:33:05.510154 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Jan 21 17:33:05 crc kubenswrapper[4799]: I0121 17:33:05.525291 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 07:53:00.751247868 +0000 UTC
Jan 21 17:33:05 crc kubenswrapper[4799]: I0121 17:33:05.623533 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:05 crc kubenswrapper[4799]: I0121 17:33:05.624665 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:05 crc kubenswrapper[4799]: I0121 17:33:05.624721 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:05 crc kubenswrapper[4799]: I0121 17:33:05.624739 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:06 crc kubenswrapper[4799]: I0121 17:33:06.526573 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 12:13:52.608380212 +0000 UTC
Jan 21 17:33:06 crc kubenswrapper[4799]: I0121 17:33:06.606630 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 21 17:33:06 crc kubenswrapper[4799]: I0121 17:33:06.606948 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:06 crc kubenswrapper[4799]: I0121 17:33:06.608898 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:06 crc kubenswrapper[4799]: I0121 17:33:06.608974 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:06 crc kubenswrapper[4799]: I0121 17:33:06.608986 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:06 crc kubenswrapper[4799]: I0121 17:33:06.615290 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 21 17:33:06 crc kubenswrapper[4799]: I0121 17:33:06.625836 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:06 crc kubenswrapper[4799]: I0121 17:33:06.625870 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:06 crc kubenswrapper[4799]: I0121 17:33:06.626997 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:06 crc kubenswrapper[4799]: I0121 17:33:06.627037 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:06 crc kubenswrapper[4799]: I0121 17:33:06.627052 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:06 crc kubenswrapper[4799]: I0121 17:33:06.627177 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:06 crc kubenswrapper[4799]: I0121 17:33:06.627203 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:06 crc kubenswrapper[4799]: I0121 17:33:06.627213 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:07 crc kubenswrapper[4799]: I0121 17:33:07.109299 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:07 crc kubenswrapper[4799]: I0121 17:33:07.111200 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:07 crc kubenswrapper[4799]: I0121 17:33:07.111260 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:07 crc kubenswrapper[4799]: I0121 17:33:07.111277 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:07 crc kubenswrapper[4799]: I0121 17:33:07.111325 4799 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 21 17:33:07 crc kubenswrapper[4799]: I0121 17:33:07.527073 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 01:16:10.653527506 +0000 UTC
Jan 21 17:33:08 crc kubenswrapper[4799]: I0121 17:33:08.528319 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 06:08:53.147786705 +0000 UTC
Jan 21 17:33:08 crc kubenswrapper[4799]: I0121 17:33:08.958155 4799 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Jan 21 17:33:09 crc kubenswrapper[4799]: I0121 17:33:09.529046 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 14:35:35.848803658 +0000 UTC
Jan 21 17:33:09 crc kubenswrapper[4799]: I0121 17:33:09.607858 4799 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 21 17:33:09 crc kubenswrapper[4799]: I0121 17:33:09.608038 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 21 17:33:10 crc kubenswrapper[4799]: I0121 17:33:10.529708 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 06:09:58.890637812 +0000 UTC
Jan 21 17:33:11 crc kubenswrapper[4799]: I0121 17:33:11.556563 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 01:02:06.684259111 +0000 UTC
Jan 21 17:33:11 crc kubenswrapper[4799]: W0121 17:33:11.565538 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Jan 21 17:33:11 crc kubenswrapper[4799]: I0121 17:33:11.565723 4799 trace.go:236] Trace[1339224330]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (21-Jan-2026 17:33:01.563) (total time: 10002ms):
Jan 21 17:33:11 crc kubenswrapper[4799]: Trace[1339224330]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (17:33:11.565)
Jan 21 17:33:11 crc kubenswrapper[4799]: Trace[1339224330]: [10.00219169s] [10.00219169s] END
Jan 21 17:33:11 crc kubenswrapper[4799]: E0121 17:33:11.565772 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Jan 21 17:33:11 crc kubenswrapper[4799]: W0121 17:33:11.845902 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Jan 21 17:33:11 crc kubenswrapper[4799]: I0121 17:33:11.846092 4799 trace.go:236] Trace[223352153]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (21-Jan-2026 17:33:01.844) (total time: 10002ms):
Jan 21 17:33:11 crc kubenswrapper[4799]: Trace[223352153]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (17:33:11.845)
Jan 21 17:33:11 crc kubenswrapper[4799]: Trace[223352153]: [10.002006219s] [10.002006219s] END
Jan 21 17:33:11 crc kubenswrapper[4799]: E0121 17:33:11.846154 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Jan 21 17:33:12 crc kubenswrapper[4799]: I0121 17:33:12.056784 4799 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Jan 21 17:33:12 crc kubenswrapper[4799]: W0121 17:33:12.086285 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Jan 21 17:33:12 crc kubenswrapper[4799]: I0121 17:33:12.086735 4799 trace.go:236] Trace[185092124]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (21-Jan-2026 17:33:02.084) (total time: 10001ms):
Jan 21 17:33:12 crc kubenswrapper[4799]: Trace[185092124]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (17:33:12.086)
Jan 21 17:33:12 crc kubenswrapper[4799]: Trace[185092124]: [10.001784308s] [10.001784308s] END
Jan 21 17:33:12 crc kubenswrapper[4799]: E0121 17:33:12.086881 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Jan 21 17:33:12 crc kubenswrapper[4799]: I0121 17:33:12.557040 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 17:38:54.460086082 +0000 UTC
Jan 21 17:33:12 crc kubenswrapper[4799]: W0121 17:33:12.607209 4799 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Jan 21 17:33:12 crc kubenswrapper[4799]: I0121 17:33:12.607374 4799 trace.go:236] Trace[1745501520]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (21-Jan-2026 17:33:02.605) (total time: 10002ms):
Jan 21 17:33:12 crc kubenswrapper[4799]: Trace[1745501520]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (17:33:12.607)
Jan 21 17:33:12 crc kubenswrapper[4799]: Trace[1745501520]: [10.00203886s] [10.00203886s] END
Jan 21 17:33:12 crc kubenswrapper[4799]: E0121 17:33:12.607443 4799 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Jan 21 17:33:12 crc kubenswrapper[4799]: I0121 17:33:12.646048 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Jan 21 17:33:12 crc kubenswrapper[4799]: I0121 17:33:12.648847 4799 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="63a941e69045d509e468eea5b2787e1e51e73c6b546a63214914823916f88c08" exitCode=255
Jan 21 17:33:12 crc kubenswrapper[4799]: I0121 17:33:12.648929 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"63a941e69045d509e468eea5b2787e1e51e73c6b546a63214914823916f88c08"}
Jan 21 17:33:12 crc kubenswrapper[4799]: I0121 17:33:12.649319 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:12 crc kubenswrapper[4799]: I0121 17:33:12.650297 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:12 crc kubenswrapper[4799]: I0121 17:33:12.650366 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:12 crc kubenswrapper[4799]: I0121 17:33:12.650383 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:12 crc kubenswrapper[4799]: I0121 17:33:12.651203 4799 scope.go:117] "RemoveContainer" containerID="63a941e69045d509e468eea5b2787e1e51e73c6b546a63214914823916f88c08"
Jan 21 17:33:13 crc kubenswrapper[4799]: I0121 17:33:13.557631 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 04:33:19.056402808 +0000 UTC
Jan 21 17:33:13 crc kubenswrapper[4799]: I0121 17:33:13.655027 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Jan 21 17:33:13 crc kubenswrapper[4799]: I0121 17:33:13.657733 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0"}
Jan 21 17:33:13 crc kubenswrapper[4799]: I0121 17:33:13.759672 4799 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 21 17:33:13 crc kubenswrapper[4799]: I0121 17:33:13.759779 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 21 17:33:14 crc kubenswrapper[4799]: E0121 17:33:14.303610 4799 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Jan 21 17:33:14 crc kubenswrapper[4799]: I0121 17:33:14.558553 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 10:44:51.54879132 +0000 UTC
Jan 21 17:33:14 crc kubenswrapper[4799]: I0121 17:33:14.661045 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:14 crc kubenswrapper[4799]: I0121 17:33:14.662460 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:14 crc kubenswrapper[4799]: I0121 17:33:14.662497 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:14 crc kubenswrapper[4799]: I0121 17:33:14.662508 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:14 crc kubenswrapper[4799]: I0121 17:33:14.851495 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:33:15 crc kubenswrapper[4799]: I0121 17:33:15.508802 4799 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Jan 21 17:33:15 crc kubenswrapper[4799]: I0121 17:33:15.508894 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Jan 21 17:33:15 crc kubenswrapper[4799]: I0121 17:33:15.643097 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 20:20:25.358878067 +0000 UTC
Jan 21 17:33:15 crc kubenswrapper[4799]: I0121 17:33:15.643631 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Jan 21 17:33:15 crc kubenswrapper[4799]: I0121 17:33:15.643816 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:15 crc kubenswrapper[4799]: I0121 17:33:15.656411 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Jan 21 17:33:15 crc kubenswrapper[4799]: I0121 17:33:15.677666 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:15 crc kubenswrapper[4799]: I0121 17:33:15.677714 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:15 crc kubenswrapper[4799]: I0121 17:33:15.677742 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:15 crc kubenswrapper[4799]: I0121 17:33:15.698173 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:15 crc kubenswrapper[4799]: I0121 17:33:15.699438 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:15 crc kubenswrapper[4799]: I0121 17:33:15.699482 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:15 crc kubenswrapper[4799]: I0121 17:33:15.699494 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:16 crc kubenswrapper[4799]: I0121 17:33:16.643733 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 16:50:23.456159082 +0000 UTC
Jan 21 17:33:16 crc kubenswrapper[4799]: I0121 17:33:16.700625 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:16 crc kubenswrapper[4799]: I0121 17:33:16.701770 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:16 crc kubenswrapper[4799]: I0121 17:33:16.701807 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:16 crc kubenswrapper[4799]: I0121 17:33:16.701817 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:17 crc kubenswrapper[4799]: I0121 17:33:17.644672 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 06:18:22.320377756 +0000 UTC
Jan 21 17:33:18 crc kubenswrapper[4799]: I0121 17:33:18.645818 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 05:07:04.600048058 +0000 UTC
Jan 21 17:33:18 crc kubenswrapper[4799]: I0121 17:33:18.767469 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:33:18 crc kubenswrapper[4799]: I0121 17:33:18.767739 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:18 crc kubenswrapper[4799]: I0121 17:33:18.769303 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:18 crc kubenswrapper[4799]: I0121 17:33:18.769370 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:18 crc kubenswrapper[4799]: I0121 17:33:18.769382 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:18 crc kubenswrapper[4799]: I0121 17:33:18.776241 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:33:19 crc kubenswrapper[4799]: I0121 17:33:19.381898 4799 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 21 17:33:19 crc kubenswrapper[4799]: I0121 17:33:19.584494 4799 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Jan 21 17:33:19 crc kubenswrapper[4799]: I0121 17:33:19.608083 4799 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 21 17:33:19 crc kubenswrapper[4799]: I0121 17:33:19.608285 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 21 17:33:19 crc kubenswrapper[4799]: I0121 17:33:19.646680 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 04:02:41.846479015 +0000 UTC
Jan 21 17:33:19 crc kubenswrapper[4799]: I0121 17:33:19.710379 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:19 crc kubenswrapper[4799]: I0121 17:33:19.711420 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:19 crc kubenswrapper[4799]: I0121 17:33:19.711471 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:19 crc kubenswrapper[4799]: I0121 17:33:19.711484 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:20 crc kubenswrapper[4799]: E0121 17:33:20.485699 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="7s"
Jan 21 17:33:20 crc kubenswrapper[4799]: I0121 17:33:20.490582 4799 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Jan 21 17:33:20 crc kubenswrapper[4799]: E0121 17:33:20.490911 4799 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Jan 21 17:33:20 crc kubenswrapper[4799]: I0121 17:33:20.507938 4799 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Jan 21 17:33:20 crc kubenswrapper[4799]: I0121 17:33:20.687256 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 11:30:28.550698284 +0000 UTC
Jan 21 17:33:21 crc kubenswrapper[4799]: I0121 17:33:21.289695 4799 csr.go:261] certificate signing request csr-5fvh7 is approved, waiting to be issued
Jan 21 17:33:21 crc kubenswrapper[4799]: I0121 17:33:21.377582 4799 csr.go:257] certificate signing request csr-5fvh7 is issued
Jan 21 17:33:21 crc kubenswrapper[4799]: I0121 17:33:21.688404 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 05:37:46.145633725 +0000 UTC
Jan 21 17:33:21 crc kubenswrapper[4799]: I0121 17:33:21.716091 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log"
Jan 21 17:33:21 crc kubenswrapper[4799]: I0121 17:33:21.716741 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Jan 21 17:33:21 crc kubenswrapper[4799]: I0121 17:33:21.718155 4799 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0" exitCode=255
Jan 21 17:33:21 crc kubenswrapper[4799]: I0121 17:33:21.718205 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0"}
Jan 21 17:33:21 crc kubenswrapper[4799]: I0121 17:33:21.718312 4799 scope.go:117] "RemoveContainer" containerID="63a941e69045d509e468eea5b2787e1e51e73c6b546a63214914823916f88c08"
Jan 21 17:33:21 crc kubenswrapper[4799]: I0121 17:33:21.718536 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 21 17:33:21 crc kubenswrapper[4799]: I0121 17:33:21.776353 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:21 crc kubenswrapper[4799]: I0121 17:33:21.776433 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:21 crc kubenswrapper[4799]: I0121 17:33:21.776449 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:21 crc kubenswrapper[4799]: I0121 17:33:21.777584 4799 scope.go:117] "RemoveContainer" containerID="0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0"
Jan 21 17:33:21 crc kubenswrapper[4799]: E0121 17:33:21.778298 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792"
Jan 21 17:33:22 crc kubenswrapper[4799]: I0121 17:33:22.482307 4799 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-21 17:28:21 +0000 UTC, rotation deadline is 2026-10-05 06:56:44.599497153 +0000 UTC
Jan 21 17:33:22 crc kubenswrapper[4799]: I0121 17:33:22.482387 4799 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6157h23m22.117115007s for next certificate rotation
Jan 21 17:33:22 crc kubenswrapper[4799]: I0121 17:33:22.688907 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 13:11:45.611669875 +0000 UTC
Jan 21 17:33:22 crc kubenswrapper[4799]: I0121 17:33:22.721516 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log"
Jan 21 17:33:23 crc kubenswrapper[4799]: I0121 17:33:23.689534 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 13:18:25.776750063 +0000 UTC
Jan 21 17:33:23 crc kubenswrapper[4799]: I0121 17:33:23.701016 4799 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials"
Jan 21 17:33:24 crc kubenswrapper[4799]: E0121 17:33:24.304039 4799 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Jan 21 17:33:24 crc kubenswrapper[4799]: I0121 17:33:24.690000 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 13:51:34.03849952 +0000 UTC
Jan 21 17:33:24 crc kubenswrapper[4799]: I0121 17:33:24.756221 4799 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.260162 4799 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.334199 4799 apiserver.go:52] "Watching apiserver"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.355962 4799 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.356555 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-sl7lv","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-ovn-kubernetes/ovnkube-node-6qqjg","openshift-dns/node-resolver-hpm7v","openshift-machine-config-operator/machine-config-daemon-snc2s","openshift-multus/multus-additional-cni-plugins-bckxf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-image-registry/node-ca-85gfq"]
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.357297 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.357308 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.357399 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.357282 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 21 17:33:25 crc kubenswrapper[4799]: E0121 17:33:25.357718 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.357783 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.357815 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.357842 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-sl7lv"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.357928 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-hpm7v"
Jan 21 17:33:25 crc kubenswrapper[4799]: E0121 17:33:25.358106 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:33:25 crc kubenswrapper[4799]: E0121 17:33:25.358208 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.358687 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-snc2s"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.358716 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-85gfq"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.358889 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-bckxf"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.358969 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.361730 4799 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.403893 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.403946 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.403978 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404017 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404059 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404084 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404111 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404149 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404169 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404198 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404219 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404243 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404276 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404298 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404336 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404356 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404389 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404422 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404450 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404481 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404517 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404541 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404754 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404785 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404827 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404851 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404875 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404899 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404931 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume
started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404960 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.404993 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.405024 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.405046 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.405066 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.473536 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.473553 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.473598 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.473690 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.473752 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.473610 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.474055 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.474289 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.474313 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.473451 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.474438 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.474788 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.474748 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.474853 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.474999 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.475076 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.475351 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.475388 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.475482 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: E0121 17:33:25.475568 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:33:25.975521679 +0000 UTC m=+32.601811702 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.475883 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.475943 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.476025 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.476101 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.476058 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.476119 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.476199 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.476336 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.476377 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.476384 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.476698 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.476926 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.476948 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.477055 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.477171 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.477215 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.477293 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.477333 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.477546 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.477576 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.477654 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.477828 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.477853 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.478015 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.478287 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.478184 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.481120 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.481862 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.477381 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.518069 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.518234 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.518299 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.518323 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.518429 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.518461 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.518483 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.518410 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.518508 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.518592 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.518676 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.518718 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.518715 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.518991 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.519039 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.519378 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.518753 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.525395 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526247 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526318 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526352 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526382 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526411 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526456 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526557 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526620 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526654 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526682 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526715 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526741 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526633 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526770 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526772 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526792 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526825 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526866 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526893 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526930 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526959 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.526994 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527009 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527024 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527095 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527177 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527207 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527228 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527269 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527289 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527306 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527323 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527348 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527365 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527387 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527406 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527423 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527442 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527458 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527473 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527483 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.527490 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.528060 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.528340 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.528535 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.528734 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.528840 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.528898 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.529186 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.529385 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.529565 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.529603 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.529743 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.529782 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.529925 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.529931 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.530118 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.530194 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.530428 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.530581 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 21 17:33:25 crc 
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.530577 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.530719 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.530853 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.531055 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.529388 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532293 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532365 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532401 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532425 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532451 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532466 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532488 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532518 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532544 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532566 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532592 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532647 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532673 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532697 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532720 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532744 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
\"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532767 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532791 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532815 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532836 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532859 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532880 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532902 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532923 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.532986 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533014 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533037 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533066 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533093 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533152 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533181 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533206 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533232 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533260 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533253 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533289 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533314 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533298 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533350 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533375 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533397 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533421 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533444 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533481 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533501 4799 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533525 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533548 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533570 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533594 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533616 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533663 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533697 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533745 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533767 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533792 4799 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533814 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533839 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533870 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533897 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533927 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533955 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.533982 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534011 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534037 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 21 17:33:25 crc 
kubenswrapper[4799]: I0121 17:33:25.534070 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534099 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534152 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534188 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534218 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534243 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534265 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534289 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534327 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534351 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" 
(UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534384 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534421 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534444 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534477 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534489 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534497 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534786 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534840 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534868 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). 
InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.535817 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.536052 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.536232 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.536634 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.536823 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.537015 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.537189 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.537346 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.537521 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.537691 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.537880 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.538015 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.538180 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.538409 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.538604 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.538725 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.538843 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.546937 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.546944 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.547456 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.547830 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.547859 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.550126 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.550483 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.550518 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.548362 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.548532 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.548898 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.549300 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.549485 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.549569 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.550210 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.551106 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.551188 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.551121 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.551746 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.551866 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.547933 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.552107 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.552231 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.552318 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.552532 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.552538 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.552589 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.553428 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.553780 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.553865 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). 
InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.553899 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.553936 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.554021 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.554098 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.554340 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.554382 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.554405 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.554437 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.554836 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.554909 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.554969 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.555171 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.555487 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.555700 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.555872 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.556548 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.556717 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.556973 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.557019 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.560009 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.560387 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.560724 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.560993 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.560877 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.561090 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.561528 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.561723 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.562170 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.562260 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.562377 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.563069 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.563175 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.563313 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.563563 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.575819 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.575830 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.576892 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.577226 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.577392 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.577555 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.577999 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.578275 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.578500 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.587339 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.587579 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). 
InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.587707 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.587728 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.587876 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588027 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588037 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588083 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588087 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.534503 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588390 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588420 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588444 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588467 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588492 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588516 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588538 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588619 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588643 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588662 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588683 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588704 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588729 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588749 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588773 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588792 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588813 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588837 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588858 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588880 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588903 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588929 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588950 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.588970 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589000 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589027 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589053 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589192 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589231 4799 
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589266 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljvgk\" (UniqueName: \"kubernetes.io/projected/fe826811-0f15-453a-9849-dae49637b629-kube-api-access-ljvgk\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589306 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589327 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tscm\" (UniqueName: \"kubernetes.io/projected/a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee-kube-api-access-7tscm\") pod \"node-ca-85gfq\" (UID: \"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\") " pod="openshift-image-registry/node-ca-85gfq"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589355 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3a9a6c57-0a82-4115-b895-c414b0cc6a3b-rootfs\") pod \"machine-config-daemon-snc2s\" (UID: \"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\") " pod="openshift-machine-config-operator/machine-config-daemon-snc2s"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589395 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589416 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/fe826811-0f15-453a-9849-dae49637b629-os-release\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589435 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/fe826811-0f15-453a-9849-dae49637b629-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589460 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
\"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589483 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-cni-bin\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589503 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/fe826811-0f15-453a-9849-dae49637b629-cnibin\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589522 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-env-overrides\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589544 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589565 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-var-lib-cni-bin\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589599 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-multus-conf-dir\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589618 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-slash\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589637 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6770819e-2fef-4203-9c5f-504628af7b66-ovn-node-metrics-cert\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589661 4799 
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589682 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-var-lib-openvswitch\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589700 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-node-log\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589723 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-ovnkube-config\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589744 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589763 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-run-netns\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589785 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589805 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fe826811-0f15-453a-9849-dae49637b629-system-cni-dir\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589823 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/fe826811-0f15-453a-9849-dae49637b629-tuning-conf-dir\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf"
\"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589843 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-var-lib-cni-multus\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589863 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-var-lib-kubelet\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589883 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3004f2e1-bd6a-46a1-a6d9-835472f616b8-multus-daemon-config\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589901 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-etc-openvswitch\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589946 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589967 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3004f2e1-bd6a-46a1-a6d9-835472f616b8-cni-binary-copy\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.589992 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590012 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee-host\") pod \"node-ca-85gfq\" (UID: \"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\") " pod="openshift-image-registry/node-ca-85gfq" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590031 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/3a9a6c57-0a82-4115-b895-c414b0cc6a3b-mcd-auth-proxy-config\") pod \"machine-config-daemon-snc2s\" (UID: \"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\") " pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590050 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590070 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590090 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-ovnkube-script-lib\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590116 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8s9k\" (UniqueName: \"kubernetes.io/projected/6770819e-2fef-4203-9c5f-504628af7b66-kube-api-access-p8s9k\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590157 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-openvswitch\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590179 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-cni-netd\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590199 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-cnibin\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590226 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-run-multus-certs\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590297 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8gxm\" (UniqueName: \"kubernetes.io/projected/3a9a6c57-0a82-4115-b895-c414b0cc6a3b-kube-api-access-z8gxm\") pod \"machine-config-daemon-snc2s\" (UID: \"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\") " pod="openshift-machine-config-operator/machine-config-daemon-snc2s"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590317 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-systemd\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590342 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4f19e394-f753-4802-a65b-a2d461af624b-hosts-file\") pod \"node-resolver-hpm7v\" (UID: \"4f19e394-f753-4802-a65b-a2d461af624b\") " pod="openshift-dns/node-resolver-hpm7v"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590359 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4tcn\" (UniqueName: \"kubernetes.io/projected/3004f2e1-bd6a-46a1-a6d9-835472f616b8-kube-api-access-p4tcn\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590377 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-log-socket\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590398 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590415 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-ovn\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590433 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-run-ovn-kubernetes\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
\"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590450 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3a9a6c57-0a82-4115-b895-c414b0cc6a3b-proxy-tls\") pod \"machine-config-daemon-snc2s\" (UID: \"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\") " pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590471 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590487 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-multus-cni-dir\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590502 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-multus-socket-dir-parent\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590519 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-hostroot\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590537 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590558 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-kubelet\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590574 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-system-cni-dir\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590591 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee-serviceca\") pod 
\"node-ca-85gfq\" (UID: \"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\") " pod="openshift-image-registry/node-ca-85gfq" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590607 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-th2t8\" (UniqueName: \"kubernetes.io/projected/4f19e394-f753-4802-a65b-a2d461af624b-kube-api-access-th2t8\") pod \"node-resolver-hpm7v\" (UID: \"4f19e394-f753-4802-a65b-a2d461af624b\") " pod="openshift-dns/node-resolver-hpm7v" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590621 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-os-release\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590637 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-etc-kubernetes\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590653 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-systemd-units\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590668 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-run-netns\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590749 4799 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590762 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590772 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590784 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590794 4799 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590803 4799 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590812 4799 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590822 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590831 4799 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590845 4799 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590853 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590870 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590879 4799 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590891 4799 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590901 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590909 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590919 4799 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590928 4799 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590937 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: 
\"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590946 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590956 4799 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590966 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590977 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.590989 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591000 4799 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591009 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591020 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591031 4799 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591040 4799 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591051 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591060 4799 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591071 4799 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591081 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591093 4799 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591102 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591112 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591122 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591149 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591160 4799 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591171 4799 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591183 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591194 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591205 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591215 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591225 4799 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591234 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591243 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591254 4799 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591265 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591274 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591284 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591293 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591305 4799 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591322 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591331 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591341 4799 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591350 4799 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591361 4799 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591370 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591380 4799 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591391 4799 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591402 4799 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591413 4799 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591429 4799 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591438 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591448 4799 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591457 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591467 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591476 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591486 4799 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591495 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: 
\"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591505 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591514 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591524 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591533 4799 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591542 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591551 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591561 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591570 4799 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591580 4799 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591589 4799 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591598 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591609 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591622 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: 
\"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591636 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591651 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591663 4799 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591674 4799 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591683 4799 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591692 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591702 4799 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591711 4799 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591730 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591744 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591756 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591771 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591782 4799 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: 
\"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591796 4799 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591809 4799 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591822 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591835 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591848 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591860 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591871 4799 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591880 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591890 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591903 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591915 4799 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591928 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591940 4799 reconciler_common.go:293] "Volume 
detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591953 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591966 4799 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591976 4799 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591985 4799 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.591999 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.592011 4799 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.592024 4799 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.592039 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.592051 4799 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.592063 4799 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.592075 4799 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.592087 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.592100 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.592111 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.592410 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.592658 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.592983 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.593648 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: E0121 17:33:25.594078 4799 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:33:25 crc kubenswrapper[4799]: E0121 17:33:25.594305 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:26.094276042 +0000 UTC m=+32.720566245 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.598726 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.599177 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.599411 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.599669 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.602669 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.602696 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.604213 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.605303 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.606150 4799 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.592124 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607569 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607593 4799 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607611 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607624 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607648 4799 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607661 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607674 4799 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607688 4799 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607704 4799 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607721 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607743 4799 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607759 4799 reconciler_common.go:293] "Volume detached for 
volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607772 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607784 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607797 4799 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607811 4799 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607823 4799 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607835 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607846 4799 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.607857 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.608299 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.608554 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.608746 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: E0121 17:33:25.608773 4799 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:33:25 crc kubenswrapper[4799]: E0121 17:33:25.608859 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:26.108835125 +0000 UTC m=+32.735125148 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.608859 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.609217 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.609369 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.609574 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.609852 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.609894 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.610074 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.610203 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.610302 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.610478 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.610482 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.610509 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.610860 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.611722 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.613172 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.613260 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.613360 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.613568 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.612108 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.617366 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.621449 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.621969 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.622422 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.622743 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.624921 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 21 17:33:25 crc kubenswrapper[4799]: E0121 17:33:25.626337 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 21 17:33:25 crc kubenswrapper[4799]: E0121 17:33:25.626363 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 21 17:33:25 crc kubenswrapper[4799]: E0121 17:33:25.626381 4799 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 21 17:33:25 crc kubenswrapper[4799]: E0121 17:33:25.626461 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:26.126433724 +0000 UTC m=+32.752723757 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.630466 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.630501 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.632295 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.632517 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.634071 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.634789 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.637300 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.637412 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.637524 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.637685 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.643401 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.649399 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.652166 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.652530 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.656795 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 21 17:33:25 crc kubenswrapper[4799]: E0121 17:33:25.665453 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:33:25 crc kubenswrapper[4799]: E0121 17:33:25.665497 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:33:25 crc kubenswrapper[4799]: E0121 17:33:25.665511 4799 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:25 crc kubenswrapper[4799]: E0121 17:33:25.665586 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:26.16556 +0000 UTC m=+32.791850023 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.672434 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.674313 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.680735 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.680827 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.683901 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.692377 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 03:37:30.131834763 +0000 UTC Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.693724 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.708683 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee-serviceca\") pod \"node-ca-85gfq\" (UID: \"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\") " pod="openshift-image-registry/node-ca-85gfq" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.708744 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-th2t8\" (UniqueName: 
\"kubernetes.io/projected/4f19e394-f753-4802-a65b-a2d461af624b-kube-api-access-th2t8\") pod \"node-resolver-hpm7v\" (UID: \"4f19e394-f753-4802-a65b-a2d461af624b\") " pod="openshift-dns/node-resolver-hpm7v" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.708771 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-os-release\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.708795 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-systemd-units\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.708815 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-run-netns\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.708838 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-etc-kubernetes\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.708862 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.708884 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tscm\" (UniqueName: \"kubernetes.io/projected/a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee-kube-api-access-7tscm\") pod \"node-ca-85gfq\" (UID: \"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\") " pod="openshift-image-registry/node-ca-85gfq" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.708911 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/fe826811-0f15-453a-9849-dae49637b629-cni-binary-copy\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.708931 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljvgk\" (UniqueName: \"kubernetes.io/projected/fe826811-0f15-453a-9849-dae49637b629-kube-api-access-ljvgk\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.708954 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: 
\"kubernetes.io/configmap/fe826811-0f15-453a-9849-dae49637b629-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.708976 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-cni-bin\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.708995 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3a9a6c57-0a82-4115-b895-c414b0cc6a3b-rootfs\") pod \"machine-config-daemon-snc2s\" (UID: \"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\") " pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709016 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/fe826811-0f15-453a-9849-dae49637b629-os-release\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709039 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-env-overrides\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709071 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/fe826811-0f15-453a-9849-dae49637b629-cnibin\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709091 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-slash\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709113 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6770819e-2fef-4203-9c5f-504628af7b66-ovn-node-metrics-cert\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709036 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-os-release\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709161 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: 
\"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-run-k8s-cni-cncf-io\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709218 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-run-k8s-cni-cncf-io\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709278 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-systemd-units\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709268 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-var-lib-cni-bin\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709311 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-run-netns\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709336 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-multus-conf-dir\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709345 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-etc-kubernetes\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709376 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709370 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-run-netns\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709424 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709442 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-var-lib-openvswitch\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709480 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-node-log\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709497 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-ovnkube-config\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709513 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-var-lib-cni-multus\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709529 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-var-lib-kubelet\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709544 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3004f2e1-bd6a-46a1-a6d9-835472f616b8-multus-daemon-config\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709563 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fe826811-0f15-453a-9849-dae49637b629-system-cni-dir\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709581 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/fe826811-0f15-453a-9849-dae49637b629-tuning-conf-dir\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709621 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee-host\") pod \"node-ca-85gfq\" (UID: \"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\") " pod="openshift-image-registry/node-ca-85gfq" Jan 21 17:33:25 crc 
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709646 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-etc-openvswitch\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709663 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3004f2e1-bd6a-46a1-a6d9-835472f616b8-cni-binary-copy\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709679 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3a9a6c57-0a82-4115-b895-c414b0cc6a3b-mcd-auth-proxy-config\") pod \"machine-config-daemon-snc2s\" (UID: \"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\") " pod="openshift-machine-config-operator/machine-config-daemon-snc2s"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709697 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-openvswitch\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709717 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-cni-netd\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709735 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709756 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-ovnkube-script-lib\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709775 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8s9k\" (UniqueName: \"kubernetes.io/projected/6770819e-2fef-4203-9c5f-504628af7b66-kube-api-access-p8s9k\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709795 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8gxm\" (UniqueName: \"kubernetes.io/projected/3a9a6c57-0a82-4115-b895-c414b0cc6a3b-kube-api-access-z8gxm\") pod \"machine-config-daemon-snc2s\" (UID: \"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\") " pod="openshift-machine-config-operator/machine-config-daemon-snc2s"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709814 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-cnibin\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709830 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-run-multus-certs\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709827 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-var-lib-kubelet\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709846 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-systemd\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709865 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4f19e394-f753-4802-a65b-a2d461af624b-hosts-file\") pod \"node-resolver-hpm7v\" (UID: \"4f19e394-f753-4802-a65b-a2d461af624b\") " pod="openshift-dns/node-resolver-hpm7v"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709882 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-log-socket\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709903 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4tcn\" (UniqueName: \"kubernetes.io/projected/3004f2e1-bd6a-46a1-a6d9-835472f616b8-kube-api-access-p4tcn\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709923 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-ovn\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709940 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-run-ovn-kubernetes\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709965 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3a9a6c57-0a82-4115-b895-c414b0cc6a3b-proxy-tls\") pod \"machine-config-daemon-snc2s\" (UID: \"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\") " pod="openshift-machine-config-operator/machine-config-daemon-snc2s"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.709988 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-kubelet\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.710002 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-system-cni-dir\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.710025 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-var-lib-openvswitch\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.710076 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-node-log\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.710072 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-multus-conf-dir\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.710103 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/fe826811-0f15-453a-9849-dae49637b629-os-release\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.710149 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-var-lib-cni-bin\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.710172 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.710268 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-run-netns\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.710304 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-var-lib-cni-multus\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.710334 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-cni-bin\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.710433 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee-serviceca\") pod \"node-ca-85gfq\" (UID: \"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\") " pod="openshift-image-registry/node-ca-85gfq"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.710559 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3a9a6c57-0a82-4115-b895-c414b0cc6a3b-rootfs\") pod \"machine-config-daemon-snc2s\" (UID: \"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\") " pod="openshift-machine-config-operator/machine-config-daemon-snc2s"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.710796 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-log-socket\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.710977 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-cnibin\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711015 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-host-run-multus-certs\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711047 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-systemd\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711079 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4f19e394-f753-4802-a65b-a2d461af624b-hosts-file\") pod \"node-resolver-hpm7v\" (UID: \"4f19e394-f753-4802-a65b-a2d461af624b\") " pod="openshift-dns/node-resolver-hpm7v"
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-ovnkube-config\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711174 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-multus-cni-dir\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711226 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-multus-socket-dir-parent\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711244 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-hostroot\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711286 4799 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711286 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3004f2e1-bd6a-46a1-a6d9-835472f616b8-multus-daemon-config\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711297 4799 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711308 4799 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711319 4799 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711329 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711338 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711348 4799 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711357 4799 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711367 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711376 4799 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711385 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711389 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/fe826811-0f15-453a-9849-dae49637b629-tuning-conf-dir\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711395 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711422 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711426 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fe826811-0f15-453a-9849-dae49637b629-system-cni-dir\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711431 4799 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711442 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711451 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711462 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: 
\"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711467 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-openvswitch\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711450 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-cni-netd\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711471 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711498 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711508 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711517 4799 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711527 4799 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711537 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711546 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711555 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711565 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711574 4799 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711583 4799 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711629 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-multus-socket-dir-parent\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711654 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-hostroot\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711678 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee-host\") pod \"node-ca-85gfq\" (UID: \"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\") " pod="openshift-image-registry/node-ca-85gfq" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711705 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-run-ovn-kubernetes\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711592 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711734 4799 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711746 4799 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711756 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711768 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711778 4799 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711788 4799 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711798 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711808 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711817 4799 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711827 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711836 4799 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711846 4799 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711857 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711868 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711878 4799 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711884 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-etc-openvswitch\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711887 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711911 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on 
node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711912 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-kubelet\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711921 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711933 4799 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711943 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711942 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3a9a6c57-0a82-4115-b895-c414b0cc6a3b-mcd-auth-proxy-config\") pod \"machine-config-daemon-snc2s\" (UID: \"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\") " pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711866 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-ovn\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711970 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/fe826811-0f15-453a-9849-dae49637b629-cnibin\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.711938 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.712017 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.712189 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-system-cni-dir\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.712203 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-slash\") pod 
\"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.712374 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-env-overrides\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.712395 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3004f2e1-bd6a-46a1-a6d9-835472f616b8-multus-cni-dir\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.712818 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-ovnkube-script-lib\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.714491 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/fe826811-0f15-453a-9849-dae49637b629-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.721898 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3004f2e1-bd6a-46a1-a6d9-835472f616b8-cni-binary-copy\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.722476 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3a9a6c57-0a82-4115-b895-c414b0cc6a3b-proxy-tls\") pod \"machine-config-daemon-snc2s\" (UID: \"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\") " pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.722861 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/fe826811-0f15-453a-9849-dae49637b629-cni-binary-copy\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.729384 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6770819e-2fef-4203-9c5f-504628af7b66-ovn-node-metrics-cert\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.731667 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tscm\" (UniqueName: \"kubernetes.io/projected/a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee-kube-api-access-7tscm\") pod \"node-ca-85gfq\" (UID: \"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\") " 
pod="openshift-image-registry/node-ca-85gfq" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.731945 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljvgk\" (UniqueName: \"kubernetes.io/projected/fe826811-0f15-453a-9849-dae49637b629-kube-api-access-ljvgk\") pod \"multus-additional-cni-plugins-bckxf\" (UID: \"fe826811-0f15-453a-9849-dae49637b629\") " pod="openshift-multus/multus-additional-cni-plugins-bckxf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.732281 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8gxm\" (UniqueName: \"kubernetes.io/projected/3a9a6c57-0a82-4115-b895-c414b0cc6a3b-kube-api-access-z8gxm\") pod \"machine-config-daemon-snc2s\" (UID: \"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\") " pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.732465 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8s9k\" (UniqueName: \"kubernetes.io/projected/6770819e-2fef-4203-9c5f-504628af7b66-kube-api-access-p8s9k\") pod \"ovnkube-node-6qqjg\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.733478 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4tcn\" (UniqueName: \"kubernetes.io/projected/3004f2e1-bd6a-46a1-a6d9-835472f616b8-kube-api-access-p4tcn\") pod \"multus-sl7lv\" (UID: \"3004f2e1-bd6a-46a1-a6d9-835472f616b8\") " pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.747923 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.757010 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.769075 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.772338 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-th2t8\" (UniqueName: \"kubernetes.io/projected/4f19e394-f753-4802-a65b-a2d461af624b-kube-api-access-th2t8\") pod \"node-resolver-hpm7v\" (UID: \"4f19e394-f753-4802-a65b-a2d461af624b\") " pod="openshift-dns/node-resolver-hpm7v" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.773016 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"6039cb8ae47ad9056f986bfba61f3e6ef287c1704ef869f4917d5281cf66cc22"} Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.784074 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.796978 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.817112 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller 
ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath
\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin
\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.819025 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 21 17:33:25 crc kubenswrapper[4799]: W0121 17:33:25.832617 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-6f0890df4d54a4263c17652a759a81c5be2d82114c24a05f9df336a7866f6899 WatchSource:0}: Error finding container 6f0890df4d54a4263c17652a759a81c5be2d82114c24a05f9df336a7866f6899: Status 404 returned error can't find the container with id 6f0890df4d54a4263c17652a759a81c5be2d82114c24a05f9df336a7866f6899 Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.834512 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.847963 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.859571 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.942718 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-sl7lv" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.943867 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-hpm7v" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.947357 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.969664 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 17:33:25 crc kubenswrapper[4799]: I0121 17:33:25.999117 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-bckxf" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.004370 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-85gfq" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.017425 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:33:26 crc kubenswrapper[4799]: E0121 17:33:26.017740 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:33:27.017705734 +0000 UTC m=+33.643995767 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.022174 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:26 crc kubenswrapper[4799]: W0121 17:33:26.024322 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a9a6c57_0a82_4115_b895_c414b0cc6a3b.slice/crio-12d5e48e2898e5d1bb028264cc95333ae7a0b930f5b618acd0cd3b3eaa835baf WatchSource:0}: Error finding container 12d5e48e2898e5d1bb028264cc95333ae7a0b930f5b618acd0cd3b3eaa835baf: Status 404 returned error can't find the container with id 12d5e48e2898e5d1bb028264cc95333ae7a0b930f5b618acd0cd3b3eaa835baf Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.118530 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:26 crc kubenswrapper[4799]: E0121 17:33:26.118616 4799 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:33:26 crc kubenswrapper[4799]: E0121 17:33:26.118689 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:27.118667873 +0000 UTC m=+33.744957906 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.118620 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:26 crc kubenswrapper[4799]: E0121 17:33:26.118732 4799 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:33:26 crc kubenswrapper[4799]: E0121 17:33:26.118872 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:27.118857537 +0000 UTC m=+33.745147560 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:33:26 crc kubenswrapper[4799]: W0121 17:33:26.135853 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4e77c27_c6f5_4b6b_a8fd_4595ea0a58ee.slice/crio-87967b153607fa2d220a5b4f74eecefc6c937c9557423d46b08694a901f027a2 WatchSource:0}: Error finding container 87967b153607fa2d220a5b4f74eecefc6c937c9557423d46b08694a901f027a2: Status 404 returned error can't find the container with id 87967b153607fa2d220a5b4f74eecefc6c937c9557423d46b08694a901f027a2 Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.353731 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.353819 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:26 crc kubenswrapper[4799]: E0121 17:33:26.354016 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:33:26 crc kubenswrapper[4799]: E0121 17:33:26.354042 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:33:26 crc kubenswrapper[4799]: E0121 17:33:26.354059 4799 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:26 crc kubenswrapper[4799]: E0121 17:33:26.354171 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:27.354143507 +0000 UTC m=+33.980433530 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:26 crc kubenswrapper[4799]: E0121 17:33:26.354247 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:33:26 crc kubenswrapper[4799]: E0121 17:33:26.354261 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:33:26 crc kubenswrapper[4799]: E0121 17:33:26.354269 4799 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:26 crc kubenswrapper[4799]: E0121 17:33:26.354297 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:27.35428809 +0000 UTC m=+33.980578123 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.365249 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.367543 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.371028 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.372930 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.376366 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.377298 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" 
path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.379029 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.380423 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.381397 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.383958 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.384695 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.387013 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.387721 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.388628 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.390016 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.390693 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.392510 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.392927 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.393558 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.395008 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.395577 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.396862 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.398278 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.399181 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.399578 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.400680 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.401476 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.402395 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.403069 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.407931 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.409020 4799 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.409190 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.411452 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.412460 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" 
path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.412977 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.415056 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.416582 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.420094 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.420989 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.422147 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.422768 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.424520 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.425739 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.427065 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.429031 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.429963 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.430524 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.431661 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" 
path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.432150 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.433105 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.433637 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.434838 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.435756 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.436436 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.611322 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.617200 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.625539 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.627562 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The 
container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.639688 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.654392 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.666201 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:26Z is 
after 2025-08-24T17:21:41Z" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.682631 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.693037 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 01:48:00.455028938 +0000 UTC Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.696787 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.718106 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.731019 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.771592 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.822653 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" event={"ID":"fe826811-0f15-453a-9849-dae49637b629","Type":"ContainerStarted","Data":"d3740179cc8a2a55ce45966b0da7e4fbc1cd78c2dc78a53222c30b806f715111"} Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.823947 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-sl7lv" event={"ID":"3004f2e1-bd6a-46a1-a6d9-835472f616b8","Type":"ContainerStarted","Data":"009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61"} Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.823986 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-sl7lv" event={"ID":"3004f2e1-bd6a-46a1-a6d9-835472f616b8","Type":"ContainerStarted","Data":"2751693baed3083176099fad72c26450ec23111481333f1c9bae2ddd240652bc"} Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.825348 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"6f0890df4d54a4263c17652a759a81c5be2d82114c24a05f9df336a7866f6899"} Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.826921 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-85gfq" event={"ID":"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee","Type":"ContainerStarted","Data":"87967b153607fa2d220a5b4f74eecefc6c937c9557423d46b08694a901f027a2"} Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.829109 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57"} Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.829164 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ad42034292ba1091ce962018b12f29273d5a90077701d96323c40ff50be53cbe"} Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.830896 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-hpm7v" event={"ID":"4f19e394-f753-4802-a65b-a2d461af624b","Type":"ContainerStarted","Data":"6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818"} Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.830929 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-hpm7v" event={"ID":"4f19e394-f753-4802-a65b-a2d461af624b","Type":"ContainerStarted","Data":"3042d7abea24a3ab5403149da8f679276262d4fea04729c3e7df2cc3a36ca9b7"} Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.832587 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9"} Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.832623 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91"} Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.833878 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31"} Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.833975 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"12d5e48e2898e5d1bb028264cc95333ae7a0b930f5b618acd0cd3b3eaa835baf"} Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.835067 4799 generic.go:334] "Generic (PLEG): container finished" podID="6770819e-2fef-4203-9c5f-504628af7b66" containerID="90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c" exitCode=0 Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.835569 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerDied","Data":"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c"} Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.835600 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerStarted","Data":"e2ab094313baa4b16aae016079be135b984e61b988de75fb270b6d3572c1064f"} Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.850378 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.884957 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.904566 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.919919 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:26 crc kubenswrapper[4799]: I0121 17:33:26.939477 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:26Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.033282 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.079840 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.080152 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:33:29.080085384 +0000 UTC m=+35.706375407 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.087753 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.102256 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.115048 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.129914 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.157982 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.173516 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.180873 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.180933 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.180991 4799 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.181060 4799 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:33:27 
crc kubenswrapper[4799]: E0121 17:33:27.181068 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:29.181050833 +0000 UTC m=+35.807340856 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.181117 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:29.181105814 +0000 UTC m=+35.807395837 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.202668 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z 
is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.204702 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.204818 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.204842 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.204963 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.205389 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.205460 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.223252 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.245294 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.258621 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.382890 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.382975 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.383144 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.383148 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.383215 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.383230 4799 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.383163 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.383322 4799 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.383306 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:29.383287338 +0000 UTC m=+36.009577361 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.383434 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:29.383417431 +0000 UTC m=+36.009707454 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.491397 4799 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.494168 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.494234 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.494252 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.494428 4799 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.520314 4799 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.520764 4799 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.522474 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.522513 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.522526 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.522546 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.522574 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:27Z","lastTransitionTime":"2026-01-21T17:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.645912 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.660066 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.660502 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.660511 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.660529 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.660546 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:27Z","lastTransitionTime":"2026-01-21T17:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.681665 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.762040 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 18:13:19.453179065 +0000 UTC Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.765540 4799 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.765687 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.765768 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.765875 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.765957 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:27Z","lastTransitionTime":"2026-01-21T17:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.783406 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.788805 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.788857 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.788869 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.788888 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.788902 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:27Z","lastTransitionTime":"2026-01-21T17:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.825623 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.832091 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.832142 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.832153 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.832178 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.832190 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:27Z","lastTransitionTime":"2026-01-21T17:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.841728 4799 generic.go:334] "Generic (PLEG): container finished" podID="fe826811-0f15-453a-9849-dae49637b629" containerID="017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49" exitCode=0 Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.841801 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" event={"ID":"fe826811-0f15-453a-9849-dae49637b629","Type":"ContainerDied","Data":"017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49"} Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.848269 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-85gfq" event={"ID":"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee","Type":"ContainerStarted","Data":"8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a"} Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.851015 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4"} Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.859461 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerStarted","Data":"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448"} Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.859520 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerStarted","Data":"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef"} Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.859533 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerStarted","Data":"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0"} Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.859546 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerStarted","Data":"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7"} Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.865644 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: E0121 17:33:27.865832 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.871022 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.871053 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.871064 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.871084 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.871099 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:27Z","lastTransitionTime":"2026-01-21T17:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.899655 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.953318 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.976800 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.976848 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.976857 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.976880 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.976891 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:27Z","lastTransitionTime":"2026-01-21T17:33:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.977596 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:27 crc kubenswrapper[4799]: I0121 17:33:27.992239 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:27Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.014182 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging 
kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/ru
n/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mount
Path\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.030077 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.049460 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.064870 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.085528 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.088341 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.088394 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.088412 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.088433 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.088446 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:28Z","lastTransitionTime":"2026-01-21T17:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.106007 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.130498 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688d
f312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.147537 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.164141 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.183974 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.191516 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.191568 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.191579 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.191601 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.191614 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:28Z","lastTransitionTime":"2026-01-21T17:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.198898 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.218561 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.232634 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.250476 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.264629 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/open
shift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.279382 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod 
was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.294403 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.294465 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.294496 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.294514 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.294528 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:28Z","lastTransitionTime":"2026-01-21T17:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.296458 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.315490 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.331383 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.343552 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.380362 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.392008 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.397051 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.397117 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.397150 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.397173 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.397187 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:28Z","lastTransitionTime":"2026-01-21T17:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.501035 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.501505 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.501520 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.501549 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.501566 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:28Z","lastTransitionTime":"2026-01-21T17:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.604272 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.604315 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.604326 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.604347 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.604360 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:28Z","lastTransitionTime":"2026-01-21T17:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.706311 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.706641 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.706749 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.706876 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.706973 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:28Z","lastTransitionTime":"2026-01-21T17:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.762992 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 15:32:51.901477447 +0000 UTC Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.811389 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.811438 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.811451 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.811470 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.811481 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:28Z","lastTransitionTime":"2026-01-21T17:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.932268 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.932350 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.932365 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.932389 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.932409 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:28Z","lastTransitionTime":"2026-01-21T17:33:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.955736 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerStarted","Data":"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e"} Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.955817 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerStarted","Data":"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6"} Jan 21 17:33:28 crc kubenswrapper[4799]: I0121 17:33:28.977977 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" event={"ID":"fe826811-0f15-453a-9849-dae49637b629","Type":"ContainerStarted","Data":"4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b"} Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.000846 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:28Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.019415 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.036062 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.036107 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.036120 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.036162 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.036208 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:29Z","lastTransitionTime":"2026-01-21T17:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.036953 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.053026 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.080740 4799 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.081685 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:33:33.081640183 +0000 UTC m=+39.707930206 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.086260 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z 
is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.105017 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.119074 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.160219 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.160289 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.160305 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.160339 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.160353 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:29Z","lastTransitionTime":"2026-01-21T17:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.161419 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.176711 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.181666 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.181719 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.181871 4799 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.181987 4799 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.181999 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:33.181974896 +0000 UTC m=+39.808264909 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.182141 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:33.182096699 +0000 UTC m=+39.808386912 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.192372 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.205076 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.205174 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.205285 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.205209 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.205446 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.205517 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.209880 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"P
odInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.225005 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.238389 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.263181 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.263260 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.263271 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.263306 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.263315 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:29Z","lastTransitionTime":"2026-01-21T17:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.366607 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.366695 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.366714 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.366744 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.366761 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:29Z","lastTransitionTime":"2026-01-21T17:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.384616 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.385172 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.385202 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.385217 4799 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.385297 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:33.385275557 +0000 UTC m=+40.011565580 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.385856 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.385883 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.385901 4799 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.385948 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:33.385929384 +0000 UTC m=+40.012219407 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.384976 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.469636 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.469684 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.469695 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.469714 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.469726 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:29Z","lastTransitionTime":"2026-01-21T17:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.572563 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.572611 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.572623 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.572643 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.572656 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:29Z","lastTransitionTime":"2026-01-21T17:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.675482 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.675519 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.675530 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.675548 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.675560 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:29Z","lastTransitionTime":"2026-01-21T17:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.765639 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 17:34:26.948610212 +0000 UTC Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.781119 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.781180 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.781191 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.781210 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.781222 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:29Z","lastTransitionTime":"2026-01-21T17:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.867745 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.884297 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.884342 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.884353 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.884381 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.884392 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:29Z","lastTransitionTime":"2026-01-21T17:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.886436 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.900797 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.915701 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.934077 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.962029 4799 scope.go:117] "RemoveContainer" containerID="0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0" Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.962929 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.963293 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.963281 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.976494 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.980183 4799 scope.go:117] "RemoveContainer" containerID="0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0" Jan 21 17:33:29 crc kubenswrapper[4799]: E0121 17:33:29.980321 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.988277 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.988333 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.988346 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.988366 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:29 crc kubenswrapper[4799]: I0121 17:33:29.988379 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:29Z","lastTransitionTime":"2026-01-21T17:33:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.003004 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df
9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.019256 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.054810 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.091320 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.091366 4799 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.091380 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.091398 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.091409 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:30Z","lastTransitionTime":"2026-01-21T17:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.117457 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-01-21T17:33:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.135266 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\
\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.149217 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.165876 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\
\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\
\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:30Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.194073 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.194172 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.194187 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.194217 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.194232 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:30Z","lastTransitionTime":"2026-01-21T17:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.298922 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.298993 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.299007 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.299030 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.299044 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:30Z","lastTransitionTime":"2026-01-21T17:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.402416 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.402484 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.402496 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.402517 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.402532 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:30Z","lastTransitionTime":"2026-01-21T17:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.506617 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.506689 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.506709 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.506731 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.506744 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:30Z","lastTransitionTime":"2026-01-21T17:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.610710 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.611158 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.611252 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.611353 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.611443 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:30Z","lastTransitionTime":"2026-01-21T17:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.713986 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.714052 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.714077 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.714106 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.714120 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:30Z","lastTransitionTime":"2026-01-21T17:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.766496 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 17:57:58.911825992 +0000 UTC Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.817583 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.817659 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.817673 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.817692 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.817708 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:30Z","lastTransitionTime":"2026-01-21T17:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.920322 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.920383 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.920401 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.920422 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.920437 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:30Z","lastTransitionTime":"2026-01-21T17:33:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.989521 4799 generic.go:334] "Generic (PLEG): container finished" podID="fe826811-0f15-453a-9849-dae49637b629" containerID="4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b" exitCode=0 Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.989585 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" event={"ID":"fe826811-0f15-453a-9849-dae49637b629","Type":"ContainerDied","Data":"4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b"} Jan 21 17:33:30 crc kubenswrapper[4799]: I0121 17:33:30.996584 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerStarted","Data":"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365"} Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.007494 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the 
pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.025335 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.025378 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.025389 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.025411 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.025392 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.025424 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:31Z","lastTransitionTime":"2026-01-21T17:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.036614 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.052431 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.066747 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.085865 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.100367 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\
"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.113875 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.127411 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.130382 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.130432 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.130443 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.130462 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.130475 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:31Z","lastTransitionTime":"2026-01-21T17:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.144895 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.162050 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.177754 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.193179 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:31Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.204187 4799 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.204269 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.204337 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:33:31 crc kubenswrapper[4799]: E0121 17:33:31.204402 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:33:31 crc kubenswrapper[4799]: E0121 17:33:31.204624 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:33:31 crc kubenswrapper[4799]: E0121 17:33:31.204775 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.217624 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\
\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"contai
nerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:31Z is after 2025-08-24T17:21:41Z"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.235599 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.235671 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.235688 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.235711 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.235726 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:31Z","lastTransitionTime":"2026-01-21T17:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.338994 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.339033 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.339042 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.339059 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.339071 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:31Z","lastTransitionTime":"2026-01-21T17:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.441556 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.441609 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.441618 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.441639 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.441658 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:31Z","lastTransitionTime":"2026-01-21T17:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.543633 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.543689 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.543702 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.543723 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.543735 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:31Z","lastTransitionTime":"2026-01-21T17:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.646700 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.646757 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.646773 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.646791 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.646803 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:31Z","lastTransitionTime":"2026-01-21T17:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.749978 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.750035 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.750044 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.750073 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.750085 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:31Z","lastTransitionTime":"2026-01-21T17:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.767617 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 05:19:53.172884576 +0000 UTC
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.853679 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.853738 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.853751 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.853775 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.853788 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:31Z","lastTransitionTime":"2026-01-21T17:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.957343 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.957409 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.957420 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.957438 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:31 crc kubenswrapper[4799]: I0121 17:33:31.957450 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:31Z","lastTransitionTime":"2026-01-21T17:33:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.003505 4799 generic.go:334] "Generic (PLEG): container finished" podID="fe826811-0f15-453a-9849-dae49637b629" containerID="cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610" exitCode=0 Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.003581 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" event={"ID":"fe826811-0f15-453a-9849-dae49637b629","Type":"ContainerDied","Data":"cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610"} Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.020698 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kuber
netes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and 
discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.038895 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.056689 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.059757 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.059806 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.059816 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.059835 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.059848 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:32Z","lastTransitionTime":"2026-01-21T17:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.071092 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.087706 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.101072 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.122492 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.138979 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mo
untPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.153435 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.165765 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.165821 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.165834 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.165856 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.165869 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:32Z","lastTransitionTime":"2026-01-21T17:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.173748 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.190524 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.206683 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.222925 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.248547 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:32Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.270022 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.270081 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.270098 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.270121 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.270144 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:32Z","lastTransitionTime":"2026-01-21T17:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.374016 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.374089 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.374104 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.374152 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.374168 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:32Z","lastTransitionTime":"2026-01-21T17:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.476629 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.476664 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.476672 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.476690 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.476703 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:32Z","lastTransitionTime":"2026-01-21T17:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.585400 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.585451 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.585461 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.585480 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.585490 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:32Z","lastTransitionTime":"2026-01-21T17:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.688395 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.688852 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.688997 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.689073 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.689149 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:32Z","lastTransitionTime":"2026-01-21T17:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.768478 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 19:25:51.539189536 +0000 UTC Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.794003 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.794049 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.794057 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.794075 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.794097 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:32Z","lastTransitionTime":"2026-01-21T17:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.898762 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.898801 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.898813 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.899022 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:32 crc kubenswrapper[4799]: I0121 17:33:32.899036 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:32Z","lastTransitionTime":"2026-01-21T17:33:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.037189 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.037220 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.037229 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.037245 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.037255 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:33Z","lastTransitionTime":"2026-01-21T17:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.039668 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec"} Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.042819 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" event={"ID":"fe826811-0f15-453a-9849-dae49637b629","Type":"ContainerDied","Data":"26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92"} Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.042922 4799 generic.go:334] "Generic (PLEG): container finished" podID="fe826811-0f15-453a-9849-dae49637b629" containerID="26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92" exitCode=0 Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.051657 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerStarted","Data":"403c0d5c1bc61a208b853704f44e5c83c2a07f9250a6e470cac551383a6215db"} Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.052000 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.052112 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.052407 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.058190 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.078606 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.083914 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.094353 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.112078 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.122154 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.129531 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.141463 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.141500 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.141511 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.141527 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.141535 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:33Z","lastTransitionTime":"2026-01-21T17:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.144054 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.158028 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:33:33 crc kubenswrapper[4799]: E0121 17:33:33.158844 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:33:41.158819098 +0000 UTC m=+47.785109121 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.167447 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z 
is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.223473 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:33 crc kubenswrapper[4799]: E0121 17:33:33.223642 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.224052 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:33 crc kubenswrapper[4799]: E0121 17:33:33.224121 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.224197 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:33 crc kubenswrapper[4799]: E0121 17:33:33.224271 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.225511 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\"
:{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.243622 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.254710 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.254757 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.254767 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.254785 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.254794 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:33Z","lastTransitionTime":"2026-01-21T17:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.259156 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.259334 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:33 crc kubenswrapper[4799]: E0121 17:33:33.259572 4799 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:33:33 crc kubenswrapper[4799]: E0121 17:33:33.259706 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:41.259688214 +0000 UTC m=+47.885978237 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:33:33 crc kubenswrapper[4799]: E0121 17:33:33.260550 4799 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:33:33 crc kubenswrapper[4799]: E0121 17:33:33.260666 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:41.260653108 +0000 UTC m=+47.886943131 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.260984 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.269985 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.282460 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.294495 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.309652 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.324317 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-
dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.340239 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.358098 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.358417 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.358491 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.358573 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.358701 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:33Z","lastTransitionTime":"2026-01-21T17:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.365778 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/
run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\"
,\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.383557 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.399854 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.413617 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.434180 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://403c0d5c1bc61a208b853704f44e5c83c2a07f9250a6e470cac551383a6215db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.447096 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.461362 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.462101 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.462264 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.462314 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.462399 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:33 crc kubenswrapper[4799]: E0121 17:33:33.462492 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:33:33 crc kubenswrapper[4799]: E0121 17:33:33.462521 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:33:33 crc kubenswrapper[4799]: E0121 17:33:33.462537 4799 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:33 crc kubenswrapper[4799]: E0121 17:33:33.462583 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:41.462569575 +0000 UTC m=+48.088859598 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:33 crc kubenswrapper[4799]: E0121 17:33:33.462638 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:33:33 crc kubenswrapper[4799]: E0121 17:33:33.462650 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:33:33 crc kubenswrapper[4799]: E0121 17:33:33.462657 4799 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:33 crc kubenswrapper[4799]: E0121 17:33:33.462677 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:41.462670787 +0000 UTC m=+48.088960810 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.462500 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.462713 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.462729 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:33Z","lastTransitionTime":"2026-01-21T17:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.475464 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.489491 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.503336 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.516709 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.527444 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:33Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.564969 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.565006 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.565016 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.565034 4799 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.565046 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:33Z","lastTransitionTime":"2026-01-21T17:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.667492 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.667565 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.667577 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.667594 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.667607 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:33Z","lastTransitionTime":"2026-01-21T17:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.768709 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 11:04:38.018103481 +0000 UTC Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.770444 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.770501 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.770513 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.770531 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.770542 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:33Z","lastTransitionTime":"2026-01-21T17:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.873944 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.873986 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.873996 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.874017 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.874028 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:33Z","lastTransitionTime":"2026-01-21T17:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.976089 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.976120 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.976139 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.976154 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:33 crc kubenswrapper[4799]: I0121 17:33:33.976163 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:33Z","lastTransitionTime":"2026-01-21T17:33:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.058190 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" event={"ID":"fe826811-0f15-453a-9849-dae49637b629","Type":"ContainerStarted","Data":"00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009"} Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.079191 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.079370 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.079449 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.079518 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.079594 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:34Z","lastTransitionTime":"2026-01-21T17:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.081093 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-d
ev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.112248 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.128388 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.145865 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.162664 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.183317 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.183488 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.183577 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.183669 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.183765 4799 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:34Z","lastTransitionTime":"2026-01-21T17:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.187418 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.209034 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"
volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"ru
n-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://403c0d5c1bc61a208b853704f44e5c83c2a07f9250a6e470cac551383a6215db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\
\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.224469 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\
\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.240762 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.258892 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.273011 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.286246 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.286415 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.286477 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.286541 4799 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.286619 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:34Z","lastTransitionTime":"2026-01-21T17:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.510188 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d
17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.518211 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.519072 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.519209 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.519333 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.519416 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:34Z","lastTransitionTime":"2026-01-21T17:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.527138 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.549351 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.587654 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.603546 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.616872 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.624574 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.624632 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.624643 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.624665 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.624689 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:34Z","lastTransitionTime":"2026-01-21T17:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.630985 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.642518 4799 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.656171 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.669786 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.687495 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://403c0d5c1bc61a208b853704f44e5c83c2a07f92
50a6e470cac551383a6215db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.699839 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.750765 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.752884 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.752908 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.752917 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.752933 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.752943 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:34Z","lastTransitionTime":"2026-01-21T17:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.766581 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.769724 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 09:27:35.505414789 +0000 UTC Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.786808 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.819900 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.856417 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.856492 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.856502 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.856521 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.856535 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:34Z","lastTransitionTime":"2026-01-21T17:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.895272 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.959650 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.959699 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.959713 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.959734 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:34 crc kubenswrapper[4799]: I0121 17:33:34.959745 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:34Z","lastTransitionTime":"2026-01-21T17:33:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.062369 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.062404 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.062412 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.062428 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.062437 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:35Z","lastTransitionTime":"2026-01-21T17:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.165953 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.165988 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.166001 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.166019 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.166031 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:35Z","lastTransitionTime":"2026-01-21T17:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.204070 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:35 crc kubenswrapper[4799]: E0121 17:33:35.204271 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.204470 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:35 crc kubenswrapper[4799]: E0121 17:33:35.204557 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.204600 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:35 crc kubenswrapper[4799]: E0121 17:33:35.204776 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.269304 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.269387 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.269398 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.269415 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.269427 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:35Z","lastTransitionTime":"2026-01-21T17:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.372579 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.372633 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.372643 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.372665 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.372675 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:35Z","lastTransitionTime":"2026-01-21T17:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.476119 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.476480 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.476489 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.476504 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.476514 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:35Z","lastTransitionTime":"2026-01-21T17:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.579175 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.579216 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.579229 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.579246 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.579258 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:35Z","lastTransitionTime":"2026-01-21T17:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.681407 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.681467 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.681487 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.681505 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.681519 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:35Z","lastTransitionTime":"2026-01-21T17:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.771473 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 05:05:58.804463784 +0000 UTC Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.784758 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.784840 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.784858 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.784880 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.784894 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:35Z","lastTransitionTime":"2026-01-21T17:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.888182 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.888234 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.888250 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.888284 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.888311 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:35Z","lastTransitionTime":"2026-01-21T17:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.991022 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.991083 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.991093 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.991117 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:35 crc kubenswrapper[4799]: I0121 17:33:35.991141 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:35Z","lastTransitionTime":"2026-01-21T17:33:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.047870 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85"] Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.049237 4799 util.go:30] "No sandbox for pod can be found. 
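The SyncLoop ADD above kicks off sandbox and volume setup for ovnkube-control-plane, while the status_manager.go:875 failures that follow all share one root cause: the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a certificate that expired 2025-08-24, months before the logged clock time. A minimal sketch (Go stdlib only; host and port taken from the errors below) for inspecting the presented leaf certificate directly:

// certcheck.go - minimal sketch: fetch the webhook endpoint's serving cert and
// report whether it has expired. InsecureSkipVerify is deliberate here:
// verification is exactly what fails, and we only want to read the leaf.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		panic(err)
	}
	defer conn.Close()
	leaf := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject=%s notAfter=%s expired=%t\n",
		leaf.Subject, leaf.NotAfter.Format(time.RFC3339), time.Now().After(leaf.NotAfter))
	// Against the errors below this would report notAfter=2025-08-24T17:21:41Z
	// and expired=true.
}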
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.049237 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.052907 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.053565 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.064301 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95xwd\" (UniqueName: \"kubernetes.io/projected/717a7f1f-de20-4d1a-a943-0aef95ea6b45-kube-api-access-95xwd\") pod \"ovnkube-control-plane-749d76644c-6rb85\" (UID: \"717a7f1f-de20-4d1a-a943-0aef95ea6b45\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.064346 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/717a7f1f-de20-4d1a-a943-0aef95ea6b45-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-6rb85\" (UID: \"717a7f1f-de20-4d1a-a943-0aef95ea6b45\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.064392 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/717a7f1f-de20-4d1a-a943-0aef95ea6b45-env-overrides\") pod \"ovnkube-control-plane-749d76644c-6rb85\" (UID: \"717a7f1f-de20-4d1a-a943-0aef95ea6b45\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.064415 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/717a7f1f-de20-4d1a-a943-0aef95ea6b45-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-6rb85\" (UID: \"717a7f1f-de20-4d1a-a943-0aef95ea6b45\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.069967 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.084724 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.093539 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.093581 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.093592 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.093609 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.093622 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:36Z","lastTransitionTime":"2026-01-21T17:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.098741 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.111754 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.132484 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://403c0d5c1bc61a208b853704f44e5c83c2a07f92
50a6e470cac551383a6215db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.148309 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.161599 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.164920 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/717a7f1f-de20-4d1a-a943-0aef95ea6b45-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-6rb85\" (UID: \"717a7f1f-de20-4d1a-a943-0aef95ea6b45\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.165033 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/717a7f1f-de20-4d1a-a943-0aef95ea6b45-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-6rb85\" (UID: \"717a7f1f-de20-4d1a-a943-0aef95ea6b45\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.165054 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95xwd\" (UniqueName: \"kubernetes.io/projected/717a7f1f-de20-4d1a-a943-0aef95ea6b45-kube-api-access-95xwd\") pod \"ovnkube-control-plane-749d76644c-6rb85\" (UID: \"717a7f1f-de20-4d1a-a943-0aef95ea6b45\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.165083 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/717a7f1f-de20-4d1a-a943-0aef95ea6b45-env-overrides\") pod \"ovnkube-control-plane-749d76644c-6rb85\" (UID: \"717a7f1f-de20-4d1a-a943-0aef95ea6b45\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.166281 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/717a7f1f-de20-4d1a-a943-0aef95ea6b45-env-overrides\") pod \"ovnkube-control-plane-749d76644c-6rb85\" (UID: \"717a7f1f-de20-4d1a-a943-0aef95ea6b45\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.166467 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/717a7f1f-de20-4d1a-a943-0aef95ea6b45-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-6rb85\" (UID: \"717a7f1f-de20-4d1a-a943-0aef95ea6b45\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.171740 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/717a7f1f-de20-4d1a-a943-0aef95ea6b45-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-6rb85\" (UID: \"717a7f1f-de20-4d1a-a943-0aef95ea6b45\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.178835 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.190394 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95xwd\" (UniqueName: \"kubernetes.io/projected/717a7f1f-de20-4d1a-a943-0aef95ea6b45-kube-api-access-95xwd\") pod 
\"ovnkube-control-plane-749d76644c-6rb85\" (UID: \"717a7f1f-de20-4d1a-a943-0aef95ea6b45\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.196571 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.197485 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.197550 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.197570 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.197604 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.197616 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:36Z","lastTransitionTime":"2026-01-21T17:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.216517 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/
openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.235629 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.253602 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9
8100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.268438 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.283899 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.297249 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:36Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.301159 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.301203 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.301413 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.301456 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.301472 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:36Z","lastTransitionTime":"2026-01-21T17:33:36Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.364332 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.405462 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.405507 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.405516 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.405539 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.405550 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:36Z","lastTransitionTime":"2026-01-21T17:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.508391 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.508448 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.508461 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.508481 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.508496 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:36Z","lastTransitionTime":"2026-01-21T17:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"}
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.612637 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.612705 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.612716 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.612736 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.612748 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:36Z","lastTransitionTime":"2026-01-21T17:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.716376 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.716442 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.716456 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.716485 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.716500 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:36Z","lastTransitionTime":"2026-01-21T17:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"}
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.886536 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 04:26:46.043771908 +0000 UTC
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.888390 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.888441 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.888453 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.888497 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.888516 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:36Z","lastTransitionTime":"2026-01-21T17:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.992927 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.992978 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.993014 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.993033 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:36 crc kubenswrapper[4799]: I0121 17:33:36.993068 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:36Z","lastTransitionTime":"2026-01-21T17:33:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"}
Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.080683 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" event={"ID":"717a7f1f-de20-4d1a-a943-0aef95ea6b45","Type":"ContainerStarted","Data":"f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe"}
Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.080748 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" event={"ID":"717a7f1f-de20-4d1a-a943-0aef95ea6b45","Type":"ContainerStarted","Data":"f5fb62b56887d0be9fcdbba8604112c411aed96f3be6d0505dd7ecda44fec2bd"}
Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.084911 4799 generic.go:334] "Generic (PLEG): container finished" podID="fe826811-0f15-453a-9849-dae49637b629" containerID="00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009" exitCode=0
Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.084968 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" event={"ID":"fe826811-0f15-453a-9849-dae49637b629","Type":"ContainerDied","Data":"00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009"}
Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.095020 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.095067 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.095083 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.095101 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.095116 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:37Z","lastTransitionTime":"2026-01-21T17:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.109160 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.125115 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.141580 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.154314 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.169287 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.181523 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.197995 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.198065 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.198097 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.198163 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.198182 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:37Z","lastTransitionTime":"2026-01-21T17:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.198298 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.205094 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.205122 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.205109 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:37 crc kubenswrapper[4799]: E0121 17:33:37.205285 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:33:37 crc kubenswrapper[4799]: E0121 17:33:37.205370 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:33:37 crc kubenswrapper[4799]: E0121 17:33:37.205512 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.211391 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.224802 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.240228 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.257846 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.283153 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://403c0d5c1bc61a208b853704f44e5c83c2a07f9250a6e470cac551383a6215db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.327862 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.328938 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.328961 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.328971 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.328988 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.328997 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:37Z","lastTransitionTime":"2026-01-21T17:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.348462 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.382760 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.479913 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.479987 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.479999 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.480022 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.480035 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:37Z","lastTransitionTime":"2026-01-21T17:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.559386 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-7q999"] Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.559964 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:37 crc kubenswrapper[4799]: E0121 17:33:37.560076 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.640687 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.640768 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.640794 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.640825 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.640850 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:37Z","lastTransitionTime":"2026-01-21T17:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.648620 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.669444 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.689277 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.707886 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.723245 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.736750 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gzlq\" (UniqueName: \"kubernetes.io/projected/7796adba-b973-44ee-b0c4-c0df544250e3-kube-api-access-5gzlq\") pod \"network-metrics-daemon-7q999\" (UID: \"7796adba-b973-44ee-b0c4-c0df544250e3\") " pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.736948 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs\") pod \"network-metrics-daemon-7q999\" (UID: \"7796adba-b973-44ee-b0c4-c0df544250e3\") " pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.743741 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.743800 4799 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.743816 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.743842 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.743855 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:37Z","lastTransitionTime":"2026-01-21T17:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.744991 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.760797 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.780219 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.797489 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.816658 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.837790 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs\") pod \"network-metrics-daemon-7q999\" (UID: \"7796adba-b973-44ee-b0c4-c0df544250e3\") " pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.837867 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gzlq\" (UniqueName: \"kubernetes.io/projected/7796adba-b973-44ee-b0c4-c0df544250e3-kube-api-access-5gzlq\") pod \"network-metrics-daemon-7q999\" (UID: \"7796adba-b973-44ee-b0c4-c0df544250e3\") " pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:37 crc kubenswrapper[4799]: E0121 17:33:37.838477 4799 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:33:37 crc kubenswrapper[4799]: E0121 17:33:37.838542 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs podName:7796adba-b973-44ee-b0c4-c0df544250e3 nodeName:}" failed. 
No retries permitted until 2026-01-21 17:33:38.338524741 +0000 UTC m=+44.964814764 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs") pod "network-metrics-daemon-7q999" (UID: "7796adba-b973-44ee-b0c4-c0df544250e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.845327 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.846445 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.846502 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.846514 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.846532 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.846546 4799 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:37Z","lastTransitionTime":"2026-01-21T17:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.861444 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gzlq\" (UniqueName: \"kubernetes.io/projected/7796adba-b973-44ee-b0c4-c0df544250e3-kube-api-access-5gzlq\") pod \"network-metrics-daemon-7q999\" (UID: \"7796adba-b973-44ee-b0c4-c0df544250e3\") " pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.873082 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"
mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.886918 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 09:11:37.36575585 +0000 UTC Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.890995 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.906404 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.933198 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://403c0d5c1bc61a208b853704f44e5c83c2a07f92
50a6e470cac551383a6215db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.963559 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.963634 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.963646 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.963666 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.963681 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:37Z","lastTransitionTime":"2026-01-21T17:33:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:37 crc kubenswrapper[4799]: I0121 17:33:37.984861 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:37Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:37.999987 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.000066 4799 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.000080 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.000102 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.000128 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:38Z","lastTransitionTime":"2026-01-21T17:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:38 crc kubenswrapper[4799]: E0121 17:33:38.013588 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.018930 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.018994 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.019041 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.019073 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.019086 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:38Z","lastTransitionTime":"2026-01-21T17:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:38 crc kubenswrapper[4799]: E0121 17:33:38.031851 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.035720 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.035772 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.035792 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.035816 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.035833 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:38Z","lastTransitionTime":"2026-01-21T17:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:38 crc kubenswrapper[4799]: E0121 17:33:38.054195 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.058189 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.058233 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.058242 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.058259 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.058273 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:38Z","lastTransitionTime":"2026-01-21T17:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:38 crc kubenswrapper[4799]: E0121 17:33:38.070750 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.075248 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.075303 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.075314 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.075337 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.075353 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:38Z","lastTransitionTime":"2026-01-21T17:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:38 crc kubenswrapper[4799]: E0121 17:33:38.092339 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: E0121 17:33:38.092483 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.093717 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
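
The kubelet does not retry these patches forever; it gives up after a fixed number of attempts, which is what the "update node status exceeds retry count" record above marks. A simplified sketch of that bounded loop follows; the count of 5 mirrors the upstream kubelet's nodeStatusUpdateRetry constant but should be treated as an assumption for this particular build:

package main

import (
	"errors"
	"fmt"
)

// updateNodeStatus sketches the bounded-retry behaviour: each failed patch
// logs "will retry", and exhausting the budget yields the terminal error
// seen in the log above.
func updateNodeStatus(patch func() error) error {
	const nodeStatusUpdateRetry = 5 // assumed value, per upstream kubelet
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := patch(); err != nil {
			fmt.Println("Error updating node status, will retry:", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	// Every attempt fails the same way here, as in the log.
	webhookErr := errors.New("x509: certificate has expired or is not yet valid")
	fmt.Println(updateNodeStatus(func() error { return webhookErr }))
}
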
event="NodeHasSufficientMemory" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.093742 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.093753 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.093775 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.093792 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:38Z","lastTransitionTime":"2026-01-21T17:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.093869 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" event={"ID":"fe826811-0f15-453a-9849-dae49637b629","Type":"ContainerStarted","Data":"f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8"} Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.096658 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" event={"ID":"717a7f1f-de20-4d1a-a943-0aef95ea6b45","Type":"ContainerStarted","Data":"5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd"} Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.108931 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.130594 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://403c0d5c1bc61a208b853704f44e5c83c2a07f9250a6e470cac551383a6215db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.145792 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.161353 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.190101 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.197454 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.197815 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.197823 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.197839 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.197850 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:38Z","lastTransitionTime":"2026-01-21T17:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.213028 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.229841 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.243061 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.258832 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.271983 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.295584 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.301190 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.301250 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.301266 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.301291 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.301305 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:38Z","lastTransitionTime":"2026-01-21T17:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.313185 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.327411 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.342035 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs\") pod \"network-metrics-daemon-7q999\" (UID: \"7796adba-b973-44ee-b0c4-c0df544250e3\") " 
pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:38 crc kubenswrapper[4799]: E0121 17:33:38.342226 4799 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:33:38 crc kubenswrapper[4799]: E0121 17:33:38.342312 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs podName:7796adba-b973-44ee-b0c4-c0df544250e3 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:39.342287708 +0000 UTC m=+45.968577731 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs") pod "network-metrics-daemon-7q999" (UID: "7796adba-b973-44ee-b0c4-c0df544250e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.342778 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\"
:\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.354156 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.365535 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.376766 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 
17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.394198 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.404650 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.404687 4799 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.404697 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.404717 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.404729 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:38Z","lastTransitionTime":"2026-01-21T17:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.409386 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.423965 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.439434 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.464009 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://403c0d5c1bc61a208b853704f44e5c83c2a07f92
50a6e470cac551383a6215db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.478718 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.494098 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.508033 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.508780 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.508818 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.508826 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.508846 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.508857 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:38Z","lastTransitionTime":"2026-01-21T17:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.521627 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.536575 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints 
registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.549898 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.566729 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.583280 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.597847 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.612636 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.612694 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.612708 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.612729 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.612740 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:38Z","lastTransitionTime":"2026-01-21T17:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.614529 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:38Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.715631 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.715692 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.715706 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.715726 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.715739 4799 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:38Z","lastTransitionTime":"2026-01-21T17:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.819014 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.819072 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.819083 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.819103 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.819112 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:38Z","lastTransitionTime":"2026-01-21T17:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.887435 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 17:07:26.753663552 +0000 UTC Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.921584 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.921918 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.922026 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.922110 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:38 crc kubenswrapper[4799]: I0121 17:33:38.922211 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:38Z","lastTransitionTime":"2026-01-21T17:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.025797 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.026203 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.026280 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.026350 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.026433 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:39Z","lastTransitionTime":"2026-01-21T17:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.110884 4799 generic.go:334] "Generic (PLEG): container finished" podID="fe826811-0f15-453a-9849-dae49637b629" containerID="f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8" exitCode=0 Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.111725 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" event={"ID":"fe826811-0f15-453a-9849-dae49637b629","Type":"ContainerDied","Data":"f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8"} Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.111764 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" event={"ID":"fe826811-0f15-453a-9849-dae49637b629","Type":"ContainerStarted","Data":"ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074"} Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.127082 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.142973 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.160487 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.163694 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.163732 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.163741 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.163759 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.163772 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:39Z","lastTransitionTime":"2026-01-21T17:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.178921 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.191491 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.204964 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.205016 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.204986 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.204969 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:39 crc kubenswrapper[4799]: E0121 17:33:39.205160 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:33:39 crc kubenswrapper[4799]: E0121 17:33:39.205299 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:33:39 crc kubenswrapper[4799]: E0121 17:33:39.205380 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:33:39 crc kubenswrapper[4799]: E0121 17:33:39.205431 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.211034 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://403c0d5c1bc61a208b853704f44e5c83c2a07f92
50a6e470cac551383a6215db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.266589 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.268287 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.268317 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.268328 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.268345 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.268355 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:39Z","lastTransitionTime":"2026-01-21T17:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.278462 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.289352 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.298888 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.309507 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.319483 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.333759 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.346663 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:39Z is after 
2025-08-24T17:21:41Z" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.359642 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.367407 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs\") pod \"network-metrics-daemon-7q999\" (UID: \"7796adba-b973-44ee-b0c4-c0df544250e3\") " pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:39 crc kubenswrapper[4799]: E0121 17:33:39.367556 4799 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:33:39 crc kubenswrapper[4799]: E0121 17:33:39.367619 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs podName:7796adba-b973-44ee-b0c4-c0df544250e3 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:41.367598364 +0000 UTC m=+47.993888387 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs") pod "network-metrics-daemon-7q999" (UID: "7796adba-b973-44ee-b0c4-c0df544250e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.370988 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.371018 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.371027 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.371043 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.371052 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:39Z","lastTransitionTime":"2026-01-21T17:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.374598 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9
5xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.474871 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.474933 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.474951 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.474974 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.474993 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:39Z","lastTransitionTime":"2026-01-21T17:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.578565 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.578624 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.578636 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.578657 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.578670 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:39Z","lastTransitionTime":"2026-01-21T17:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.681521 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.681576 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.681588 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.681609 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.681632 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:39Z","lastTransitionTime":"2026-01-21T17:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.785028 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.785087 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.785102 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.785147 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.785162 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:39Z","lastTransitionTime":"2026-01-21T17:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.887595 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 14:04:02.061477908 +0000 UTC
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.888039 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.888145 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.888232 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.888326 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.888398 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:39Z","lastTransitionTime":"2026-01-21T17:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.991324 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.991682 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.991751 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.991817 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:39 crc kubenswrapper[4799]: I0121 17:33:39.991942 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:39Z","lastTransitionTime":"2026-01-21T17:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.094924 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.094980 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.094990 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.095009 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.095019 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:40Z","lastTransitionTime":"2026-01-21T17:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.118445 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovnkube-controller/0.log"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.123214 4799 generic.go:334] "Generic (PLEG): container finished" podID="6770819e-2fef-4203-9c5f-504628af7b66" containerID="403c0d5c1bc61a208b853704f44e5c83c2a07f9250a6e470cac551383a6215db" exitCode=1
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.123301 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerDied","Data":"403c0d5c1bc61a208b853704f44e5c83c2a07f9250a6e470cac551383a6215db"}
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.124182 4799 scope.go:117] "RemoveContainer" containerID="403c0d5c1bc61a208b853704f44e5c83c2a07f9250a6e470cac551383a6215db"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.146216 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.163606 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.181396 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.197977 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.198203 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.198587 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.198688 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.198785 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.198861 4799 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:40Z","lastTransitionTime":"2026-01-21T17:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.216340 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.243156 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"
volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"ru
n-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://403c0d5c1bc61a208b853704f44e5c83c2a07f9250a6e470cac551383a6215db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://403c0d5c1bc61a208b853704f44e5c83c2a07f9250a6e470cac551383a6215db\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"message\\\":\\\"158 5975 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:33:39.438282 5975 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:33:39.439100 5975 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0121 17:33:39.439119 5975 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0121 17:33:39.439164 5975 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0121 17:33:39.439179 5975 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0121 17:33:39.439222 5975 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0121 17:33:39.439235 5975 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0121 17:33:39.439252 5975 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0121 17:33:39.439273 5975 handler.go:208] Removed *v1.Node event handler 2\\\\nI0121 17:33:39.439293 5975 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0121 17:33:39.439328 5975 handler.go:208] Removed *v1.Node event handler 
7\\\\nI0121 17:33:39.439362 5975 factory.go:656] Stopping watch factory\\\\nI0121 17:33:39.439384 5975 ovnkube.go:599] Stopped ovnkube\\\\nI0121 17:33:39.439408 5975 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0121 17\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.12
6.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.258477 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.273007 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.287584 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.300373 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.302440 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.302494 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.302506 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.302527 4799 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.302538 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:40Z","lastTransitionTime":"2026-01-21T17:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.314567 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d
17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.326515 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.341923 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.356838 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.372024 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.387472 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:40Z is after 2025-08-24T17:21:41Z" Jan 21 
17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.405656 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.405737 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.405756 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.405779 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.405796 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:40Z","lastTransitionTime":"2026-01-21T17:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.509113 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.509198 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.509211 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.509233 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.509252 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:40Z","lastTransitionTime":"2026-01-21T17:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.612278 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.612345 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.612357 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.612378 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.612395 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:40Z","lastTransitionTime":"2026-01-21T17:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.715620 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.715701 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.715712 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.715740 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.715785 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:40Z","lastTransitionTime":"2026-01-21T17:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.818080 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.818145 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.818158 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.818177 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.818186 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:40Z","lastTransitionTime":"2026-01-21T17:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.888575 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 12:22:55.959397313 +0000 UTC
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.920781 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.920825 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.920834 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.920854 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:40 crc kubenswrapper[4799]: I0121 17:33:40.920864 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:40Z","lastTransitionTime":"2026-01-21T17:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.023196 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.023518 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.023531 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.023552 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.023562 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:41Z","lastTransitionTime":"2026-01-21T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.126589 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.126632 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.126644 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.126664 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.126677 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:41Z","lastTransitionTime":"2026-01-21T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.129376 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovnkube-controller/0.log"
Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.131115 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerStarted","Data":"93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d"}
Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.132099 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.147399 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:41Z is after 2025-08-24T17:21:41Z" Jan 21 
17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.166666 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.182404 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.185540 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.185692 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:33:57.185658963 +0000 UTC m=+63.811948996 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.198335 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.204067 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.204109 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.204161 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.204191 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.204069 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.204384 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3"
Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.204441 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.204491 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.212849 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.229310 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.229350 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.229361 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.229381 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.229396 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:41Z","lastTransitionTime":"2026-01-21T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.238108 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88
c39af2f4de59a493de75782d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://403c0d5c1bc61a208b853704f44e5c83c2a07f9250a6e470cac551383a6215db\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"message\\\":\\\"158 5975 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:33:39.438282 5975 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:33:39.439100 5975 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0121 17:33:39.439119 5975 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0121 17:33:39.439164 5975 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0121 17:33:39.439179 5975 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0121 17:33:39.439222 5975 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0121 17:33:39.439235 5975 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0121 17:33:39.439252 5975 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0121 17:33:39.439273 5975 handler.go:208] Removed *v1.Node event handler 2\\\\nI0121 17:33:39.439293 5975 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0121 17:33:39.439328 5975 handler.go:208] Removed *v1.Node event handler 7\\\\nI0121 17:33:39.439362 5975 factory.go:656] Stopping watch factory\\\\nI0121 17:33:39.439384 5975 ovnkube.go:599] Stopped ovnkube\\\\nI0121 17:33:39.439408 5975 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0121 
17\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.250416 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.263737 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.277265 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.286428 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.286938 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.287039 4799 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.287227 4799 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.287320 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:57.287298971 +0000 UTC m=+63.913588984 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.287152 4799 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.287543 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:57.287531938 +0000 UTC m=+63.913821961 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.300667 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117
ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.313102 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.326156 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.331464 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.331499 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.331510 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.331527 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.331536 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:41Z","lastTransitionTime":"2026-01-21T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.343581 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.358854 4799 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cdd
fba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.372586 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:41Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.388335 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs\") pod \"network-metrics-daemon-7q999\" (UID: \"7796adba-b973-44ee-b0c4-c0df544250e3\") " pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.388565 4799 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.388673 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs podName:7796adba-b973-44ee-b0c4-c0df544250e3 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:45.388650481 +0000 UTC m=+52.014940504 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs") pod "network-metrics-daemon-7q999" (UID: "7796adba-b973-44ee-b0c4-c0df544250e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.434536 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.434599 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.434609 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.434630 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.434640 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:41Z","lastTransitionTime":"2026-01-21T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.488975 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.489059 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.489245 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.489276 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.489292 4799 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.489348 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:57.489331451 +0000 UTC m=+64.115621474 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.489245 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.489832 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.489867 4799 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:41 crc kubenswrapper[4799]: E0121 17:33:41.489986 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:57.489952949 +0000 UTC m=+64.116242992 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.537005 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.537051 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.537061 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.537076 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.537085 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:41Z","lastTransitionTime":"2026-01-21T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.639661 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.639719 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.639729 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.639746 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.639756 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:41Z","lastTransitionTime":"2026-01-21T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.742777 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.742826 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.742838 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.742859 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.742872 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:41Z","lastTransitionTime":"2026-01-21T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.845508 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.845569 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.845580 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.845605 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.845619 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:41Z","lastTransitionTime":"2026-01-21T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.889590 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 13:06:54.303784349 +0000 UTC Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.949737 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.949817 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.949830 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.949852 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:41 crc kubenswrapper[4799]: I0121 17:33:41.949868 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:41Z","lastTransitionTime":"2026-01-21T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.052194 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.052232 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.052241 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.052255 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.052266 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:42Z","lastTransitionTime":"2026-01-21T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
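
The certificate_manager line above is worth pausing on: the kubelet-serving certificate's rotation deadline (2025-11-20) already lies two months before the node's current wall clock (2026-01-21), so rotation is overdue and will be attempted immediately. In a client-go style certificate manager the deadline is a randomized point roughly 70-85% of the way through the certificate's validity window; a sketch of that computation (the jitter factors are assumed, and the dates in main are placeholders, not values from this log):

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    // Sketch of a jittered rotation deadline: roughly 70-85% of the way
    // through the cert's validity window.
    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
        total := notAfter.Sub(notBefore)
        base := time.Duration(float64(total) * 0.7)                   // 70% of lifetime
        jitter := time.Duration(rand.Float64() * 0.2 * float64(base)) // up to +14%
        return notBefore.Add(base + jitter)
    }

    func main() {
        notBefore := time.Date(2025, 11, 26, 0, 0, 0, 0, time.UTC)
        notAfter := notBefore.Add(90 * 24 * time.Hour) // assumed 90-day cert
        deadline := rotationDeadline(notBefore, notAfter)
        fmt.Println("rotate at:", deadline)
        if time.Now().After(deadline) {
            fmt.Println("deadline passed; rotate immediately")
        }
    }
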
Has your network provider started?"} Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.139508 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovnkube-controller/1.log" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.140985 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovnkube-controller/0.log" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.146633 4799 generic.go:334] "Generic (PLEG): container finished" podID="6770819e-2fef-4203-9c5f-504628af7b66" containerID="93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d" exitCode=1 Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.146692 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerDied","Data":"93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d"} Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.146745 4799 scope.go:117] "RemoveContainer" containerID="403c0d5c1bc61a208b853704f44e5c83c2a07f9250a6e470cac551383a6215db" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.150330 4799 scope.go:117] "RemoveContainer" containerID="93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d" Jan 21 17:33:42 crc kubenswrapper[4799]: E0121 17:33:42.150626 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.154966 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.155026 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.155040 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.155060 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.155072 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:42Z","lastTransitionTime":"2026-01-21T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
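
The PLEG sequence above is the standard crash-loop shape: ovnkube-controller exits with code 1, the SyncLoop receives a ContainerDied event, kubelet prunes the previous dead instance (RemoveContainer 403c0d5c...), and then declines to restart immediately, applying CrashLoopBackOff starting at 10s and doubling per crash up to a 5-minute cap. A minimal stand-in for that event handling, with illustrative types rather than kubelet's real ones:

    package main

    import "fmt"

    // Stand-in for the PLEG-style lifecycle events visible above
    // ("SyncLoop (PLEG): event for pod ... ContainerDied").
    type PodLifecycleEvent struct {
        PodID string // pod UID, e.g. 6770819e-2fef-4203-9c5f-504628af7b66
        Type  string // "ContainerStarted", "ContainerDied", ...
        Data  string // container ID the event refers to
    }

    func handle(ev PodLifecycleEvent) {
        switch ev.Type {
        case "ContainerDied":
            // On a death event kubelet resyncs the pod: it prunes older
            // dead instances ("RemoveContainer") and, if the restart policy
            // allows, schedules a restart subject to crash-loop backoff
            // (10s initial, doubling to a 5m cap).
            fmt.Printf("pod %s: container %s died; resync with backoff\n", ev.PodID, ev.Data)
        default:
            fmt.Printf("pod %s: %s (%s)\n", ev.PodID, ev.Type, ev.Data)
        }
    }

    func main() {
        handle(PodLifecycleEvent{
            PodID: "6770819e-2fef-4203-9c5f-504628af7b66",
            Type:  "ContainerDied",
            Data:  "93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d",
        })
    }
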
Has your network provider started?"} Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.163871 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.179645 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:42Z is after 
2025-08-24T17:21:41Z" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.194099 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.407906 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.408793 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.408847 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.408862 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.408885 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.408904 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:42Z","lastTransitionTime":"2026-01-21T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
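
From here on, every "Failed to update status for pod" entry fails identically: the apiserver must consult the pod.network-node-identity.openshift.io validating webhook at https://127.0.0.1:9743 before accepting the patch, and that webhook's serving certificate expired on 2025-08-24T17:21:41Z, roughly 150 days (about 3,600 hours) before the node's current clock of 2026-01-21T17:33:42Z. One way to confirm what the x509 error reports is to read the endpoint's certificate directly; a sketch (run it on the node itself, since the listener is loopback-only):

    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
        "time"
    )

    // Reads the serving certificate of the webhook endpoint named in the
    // errors above and prints its validity window. InsecureSkipVerify is
    // deliberate: verification would fail on the expired cert before we
    // could inspect it.
    func main() {
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            log.Fatal(err)
        }
        defer conn.Close()

        certs := conn.ConnectionState().PeerCertificates
        if len(certs) == 0 {
            log.Fatal("no peer certificate presented")
        }
        cert := certs[0]
        fmt.Println("subject:  ", cert.Subject)
        fmt.Println("notBefore:", cert.NotBefore)
        fmt.Println("notAfter: ", cert.NotAfter)
        if time.Now().After(cert.NotAfter) {
            fmt.Println("expired; matches the x509 error in this log")
        }
    }
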
Has your network provider started?"} Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.423838 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.444722 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://403c0d5c1bc61a208b853704f44e5c83c2a07f9250a6e470cac551383a6215db\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"message\\\":\\\"158 5975 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:33:39.438282 5975 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0121 17:33:39.439100 5975 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0121 17:33:39.439119 5975 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0121 17:33:39.439164 5975 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0121 17:33:39.439179 5975 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0121 17:33:39.439222 5975 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0121 17:33:39.439235 5975 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0121 17:33:39.439252 5975 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0121 17:33:39.439273 5975 handler.go:208] Removed *v1.Node event handler 2\\\\nI0121 17:33:39.439293 5975 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0121 17:33:39.439328 5975 handler.go:208] Removed *v1.Node event handler 7\\\\nI0121 17:33:39.439362 5975 factory.go:656] Stopping watch factory\\\\nI0121 17:33:39.439384 5975 ovnkube.go:599] Stopped ovnkube\\\\nI0121 17:33:39.439408 5975 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0121 
17\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:41Z\\\",\\\"message\\\":\\\"etry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168620 6245 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI0121 17:33:41.168674 6245 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0121 17:33:41.168671 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85\\\\nI0121 17:33:41.168681 6245 lb_config.go:1031] Cluster endpoints for openshift-authentication-operator/metrics for network=default are: map[]\\\\nI0121 17:33:41.168690 6245 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85 in node crc\\\\nI0121 17:33:41.168678 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168693 6245 services_controller.go:443] Built service openshift-authentication-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.150\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, 
internalTrafficLo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f
36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.460816 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.478354 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.492047 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.504402 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.511552 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.511590 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.511600 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:42 crc 
kubenswrapper[4799]: I0121 17:33:42.511621 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.511631 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:42Z","lastTransitionTime":"2026-01-21T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.520070 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has 
all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.533045 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.548100 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.565441 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.579926 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.592460 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:42Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.619269 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.619327 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.619338 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.619360 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.619371 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:42Z","lastTransitionTime":"2026-01-21T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.722768 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.722814 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.722825 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.722844 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.722855 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:42Z","lastTransitionTime":"2026-01-21T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.825482 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.825548 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.825558 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.825577 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.825587 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:42Z","lastTransitionTime":"2026-01-21T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.890113 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 14:17:41.353030064 +0000 UTC Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.928705 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.928762 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.928771 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.928789 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:42 crc kubenswrapper[4799]: I0121 17:33:42.928801 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:42Z","lastTransitionTime":"2026-01-21T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.031412 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.031463 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.031474 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.031494 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.031506 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:43Z","lastTransitionTime":"2026-01-21T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.134752 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.134791 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.134801 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.134818 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.134830 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:43Z","lastTransitionTime":"2026-01-21T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.152918 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovnkube-controller/1.log" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.156834 4799 scope.go:117] "RemoveContainer" containerID="93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d" Jan 21 17:33:43 crc kubenswrapper[4799]: E0121 17:33:43.157049 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.175421 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.188720 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.202080 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.204406 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:43 crc kubenswrapper[4799]: E0121 17:33:43.204556 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.204651 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.204738 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:43 crc kubenswrapper[4799]: E0121 17:33:43.204864 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:33:43 crc kubenswrapper[4799]: E0121 17:33:43.204991 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.205244 4799 scope.go:117] "RemoveContainer" containerID="0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.204563 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:43 crc kubenswrapper[4799]: E0121 17:33:43.205783 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.213997 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.226977 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.240679 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.240715 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.240725 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.240744 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.240755 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:43Z","lastTransitionTime":"2026-01-21T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.241314 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.260121 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.277766 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.292992 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.307183 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:43Z is after 2025-08-24T17:21:41Z" Jan 21 
17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.321009 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.335337 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.342769 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.342804 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.342816 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.342833 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.342845 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:43Z","lastTransitionTime":"2026-01-21T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.350323 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.364815 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.386715 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88
c39af2f4de59a493de75782d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:41Z\\\",\\\"message\\\":\\\"etry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168620 6245 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI0121 17:33:41.168674 6245 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0121 17:33:41.168671 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85\\\\nI0121 17:33:41.168681 6245 lb_config.go:1031] Cluster endpoints for openshift-authentication-operator/metrics for network=default are: map[]\\\\nI0121 17:33:41.168690 6245 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85 in node crc\\\\nI0121 17:33:41.168678 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168693 6245 services_controller.go:443] Built service openshift-authentication-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.150\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.400729 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:43Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.445273 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.445324 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.445336 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.445355 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.445367 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:43Z","lastTransitionTime":"2026-01-21T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.548223 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.548263 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.548277 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.548295 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.548308 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:43Z","lastTransitionTime":"2026-01-21T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.742919 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.743257 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.743325 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.743392 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.743510 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:43Z","lastTransitionTime":"2026-01-21T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.846515 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.846543 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.846551 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.846570 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.846579 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:43Z","lastTransitionTime":"2026-01-21T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.890909 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-06 23:47:40.133442274 +0000 UTC Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.952415 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.952487 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.952507 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.952532 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:43 crc kubenswrapper[4799]: I0121 17:33:43.952556 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:43Z","lastTransitionTime":"2026-01-21T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.055835 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.055883 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.055892 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.055910 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.055921 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:44Z","lastTransitionTime":"2026-01-21T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.158633 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.158688 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.158700 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.158726 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.158754 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:44Z","lastTransitionTime":"2026-01-21T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.162598 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.164487 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4"} Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.166010 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.181819 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.194012 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.208443 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.223636 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.235681 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.252439 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.261668 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.261715 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:44 crc 
kubenswrapper[4799]: I0121 17:33:44.261729 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.261747 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.261758 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:44Z","lastTransitionTime":"2026-01-21T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.266256 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.283364 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.296470 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 
17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.311223 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.325820 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.340293 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.353876 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.364186 4799 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.364228 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.364239 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.364257 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.364268 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:44Z","lastTransitionTime":"2026-01-21T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.374816 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88
c39af2f4de59a493de75782d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:41Z\\\",\\\"message\\\":\\\"etry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168620 6245 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI0121 17:33:41.168674 6245 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0121 17:33:41.168671 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85\\\\nI0121 17:33:41.168681 6245 lb_config.go:1031] Cluster endpoints for openshift-authentication-operator/metrics for network=default are: map[]\\\\nI0121 17:33:41.168690 6245 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85 in node crc\\\\nI0121 17:33:41.168678 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168693 6245 services_controller.go:443] Built service openshift-authentication-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.150\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.388852 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.404188 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.418243 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.431516 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.444535 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 
17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.457426 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.467089 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.467145 4799 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.467156 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.467172 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.467186 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:44Z","lastTransitionTime":"2026-01-21T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.473563 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.490415 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.503946 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.517569 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.539496 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:41Z\\\",\\\"message\\\":\\\"etry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168620 6245 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI0121 17:33:41.168674 6245 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0121 17:33:41.168671 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85\\\\nI0121 17:33:41.168681 6245 lb_config.go:1031] Cluster endpoints for openshift-authentication-operator/metrics for network=default are: map[]\\\\nI0121 17:33:41.168690 6245 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85 in node crc\\\\nI0121 17:33:41.168678 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168693 6245 services_controller.go:443] Built service openshift-authentication-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.150\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.553329 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.567376 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.569237 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.569289 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.569301 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.569320 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.569331 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:44Z","lastTransitionTime":"2026-01-21T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.620544 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.648443 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.672070 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.672104 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.672112 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.672164 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.672176 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:44Z","lastTransitionTime":"2026-01-21T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.672180 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.685747 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.701296 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:44Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.774626 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.774668 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:44 crc 
kubenswrapper[4799]: I0121 17:33:44.774680 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.774696 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.774707 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:44Z","lastTransitionTime":"2026-01-21T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.877090 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.877176 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.877188 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.877208 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.877221 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:44Z","lastTransitionTime":"2026-01-21T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.891222 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 20:24:09.799006683 +0000 UTC Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.980102 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.980167 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.980184 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.980204 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:44 crc kubenswrapper[4799]: I0121 17:33:44.980216 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:44Z","lastTransitionTime":"2026-01-21T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.082842 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.082899 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.082908 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.082924 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.082935 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:45Z","lastTransitionTime":"2026-01-21T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.185694 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.185730 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.185742 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.185758 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.185768 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:45Z","lastTransitionTime":"2026-01-21T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.205012 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.205022 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.205038 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.205159 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:45 crc kubenswrapper[4799]: E0121 17:33:45.205247 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:33:45 crc kubenswrapper[4799]: E0121 17:33:45.205399 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:33:45 crc kubenswrapper[4799]: E0121 17:33:45.205495 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:33:45 crc kubenswrapper[4799]: E0121 17:33:45.205609 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.292693 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.292745 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.292756 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.292775 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.292786 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:45Z","lastTransitionTime":"2026-01-21T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.395517 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.395555 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.395564 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.395580 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.395589 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:45Z","lastTransitionTime":"2026-01-21T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.457744 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs\") pod \"network-metrics-daemon-7q999\" (UID: \"7796adba-b973-44ee-b0c4-c0df544250e3\") " pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:45 crc kubenswrapper[4799]: E0121 17:33:45.457933 4799 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:33:45 crc kubenswrapper[4799]: E0121 17:33:45.458023 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs podName:7796adba-b973-44ee-b0c4-c0df544250e3 nodeName:}" failed. No retries permitted until 2026-01-21 17:33:53.457999038 +0000 UTC m=+60.084289061 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs") pod "network-metrics-daemon-7q999" (UID: "7796adba-b973-44ee-b0c4-c0df544250e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.499044 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.499088 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.499103 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.499120 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.499146 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:45Z","lastTransitionTime":"2026-01-21T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.601523 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.601575 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.601585 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.601602 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.601612 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:45Z","lastTransitionTime":"2026-01-21T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.704501 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.704551 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.704561 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.704575 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.704587 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:45Z","lastTransitionTime":"2026-01-21T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.806987 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.807041 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.807055 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.807075 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.807086 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:45Z","lastTransitionTime":"2026-01-21T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.891626 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 17:44:48.351956391 +0000 UTC Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.909699 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.909743 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.909753 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.909769 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:45 crc kubenswrapper[4799]: I0121 17:33:45.909779 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:45Z","lastTransitionTime":"2026-01-21T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.012340 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.012391 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.012401 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.012420 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.012430 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:46Z","lastTransitionTime":"2026-01-21T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.114771 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.114851 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.114865 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.114889 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.114900 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:46Z","lastTransitionTime":"2026-01-21T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.217558 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.217615 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.217625 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.217643 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.217654 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:46Z","lastTransitionTime":"2026-01-21T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.320470 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.320531 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.320542 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.320559 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.320574 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:46Z","lastTransitionTime":"2026-01-21T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.423064 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.423117 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.423151 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.423170 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.423181 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:46Z","lastTransitionTime":"2026-01-21T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.526460 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.526508 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.526523 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.526541 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.526551 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:46Z","lastTransitionTime":"2026-01-21T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.628606 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.628675 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.628687 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.628705 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.628715 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:46Z","lastTransitionTime":"2026-01-21T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.731831 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.731901 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.731915 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.731937 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.731952 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:46Z","lastTransitionTime":"2026-01-21T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.839391 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.839446 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.839455 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.839474 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.839484 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:46Z","lastTransitionTime":"2026-01-21T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.892176 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 11:09:39.697609099 +0000 UTC Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.943049 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.943110 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.943150 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.943171 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:46 crc kubenswrapper[4799]: I0121 17:33:46.943184 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:46Z","lastTransitionTime":"2026-01-21T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.036064 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.045183 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.046938 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.046995 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.047016 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.047039 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.047051 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:47Z","lastTransitionTime":"2026-01-21T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.052712 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.069939 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.088099 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.102513 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.118411 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.130596 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.146286 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.150034 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.150080 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.150092 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.150114 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.150160 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:47Z","lastTransitionTime":"2026-01-21T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.164456 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc
32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.179871 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.193294 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.204579 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.204662 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.204687 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.204599 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:47 crc kubenswrapper[4799]: E0121 17:33:47.204789 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:33:47 crc kubenswrapper[4799]: E0121 17:33:47.204911 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:33:47 crc kubenswrapper[4799]: E0121 17:33:47.205248 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:33:47 crc kubenswrapper[4799]: E0121 17:33:47.206309 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.208809 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":tr
ue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.233147 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88
c39af2f4de59a493de75782d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:41Z\\\",\\\"message\\\":\\\"etry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168620 6245 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI0121 17:33:41.168674 6245 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0121 17:33:41.168671 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85\\\\nI0121 17:33:41.168681 6245 lb_config.go:1031] Cluster endpoints for openshift-authentication-operator/metrics for network=default are: map[]\\\\nI0121 17:33:41.168690 6245 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85 in node crc\\\\nI0121 17:33:41.168678 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168693 6245 services_controller.go:443] Built service openshift-authentication-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.150\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.249697 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.253239 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.253310 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.253326 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.253354 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.253370 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:47Z","lastTransitionTime":"2026-01-21T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.269310 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.301676 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.316652 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:47Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.356533 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.356584 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.356597 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.356614 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.356626 4799 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:47Z","lastTransitionTime":"2026-01-21T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.459482 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.459532 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.459544 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.459564 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.459576 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:47Z","lastTransitionTime":"2026-01-21T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.562745 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.562840 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.562857 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.562877 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.562891 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:47Z","lastTransitionTime":"2026-01-21T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.665756 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.666265 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.666387 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.666511 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.666614 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:47Z","lastTransitionTime":"2026-01-21T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.777211 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.777260 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.777270 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.777286 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.777295 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:47Z","lastTransitionTime":"2026-01-21T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.879582 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.880306 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.880714 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.880861 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.880956 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:47Z","lastTransitionTime":"2026-01-21T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.893055 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 00:22:53.25547475 +0000 UTC Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.987122 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.987175 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.987210 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.987229 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:47 crc kubenswrapper[4799]: I0121 17:33:47.987240 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:47Z","lastTransitionTime":"2026-01-21T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.090249 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.090293 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.090323 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.090342 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.090352 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:48Z","lastTransitionTime":"2026-01-21T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.187935 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.187999 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.188013 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.188038 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.188054 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:48Z","lastTransitionTime":"2026-01-21T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:48 crc kubenswrapper[4799]: E0121 17:33:48.203742 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:48Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.208566 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.208597 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.208605 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.208616 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.208627 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:48Z","lastTransitionTime":"2026-01-21T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:48 crc kubenswrapper[4799]: E0121 17:33:48.225352 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:48Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.230311 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.230528 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
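Every E-level "Error updating node status" entry in this stretch fails the same way: the kubelet's status PATCH for node "crc" is routed through the node.network-node-identity.openshift.io validating webhook at https://127.0.0.1:9743, and the webhook's serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2026-01-21. (Per the certificate_manager line at 17:33:47.893055, the kubelet's own serving certificate is still valid until 2026-02-24, though its rotation deadline of 2026-01-10 has already passed, so rotation is due.) The following is a minimal standalone Go sketch, not kubelet code, that reproduces the same x509 verdict for a PEM certificate on disk; the file path is a placeholder:

// certcheck.go - a minimal standalone sketch (not kubelet code): load a PEM
// certificate and report the same "expired or not yet valid" condition that
// Go's TLS handshake raises in the log above. The path is a placeholder.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	pemBytes, err := os.ReadFile("/path/to/webhook-serving-cert.pem") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now()
	switch {
	case now.After(cert.NotAfter):
		// This is the state the log reports: current time is after NotAfter.
		fmt.Printf("certificate expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate not yet valid before %s\n", cert.NotBefore.UTC())
	default:
		fmt.Printf("certificate valid until %s\n", cert.NotAfter.UTC())
	}
}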
event="NodeHasNoDiskPressure" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.230653 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.230789 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.230933 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:48Z","lastTransitionTime":"2026-01-21T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:48 crc kubenswrapper[4799]: E0121 17:33:48.244322 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:48Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.249522 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.249577 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
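Independently of the webhook failure, the recurring "Node became not ready" condition has its own cause: the container runtime reports NetworkReady=false because no CNI network configuration exists in /etc/kubernetes/cni/net.d/. The sketch below approximates, rather than reproduces, the real ocicni/libcni lookup: it scans the directory named in the log for the config extensions libcni loads.

// cnicheck.go - a rough standalone sketch of the kind of check behind the
// "no CNI configuration file" message: look for network config files in the
// directory named in the log. This approximates, not reproduces, the real
// ocicni/libcni logic.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", confDir, err)
		return
	}
	var found []string
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions libcni loads
			found = append(found, e.Name())
		}
	}
	if len(found) == 0 {
		// Matches the node condition: NetworkReady=false, NetworkPluginNotReady.
		fmt.Printf("no CNI configuration file in %s. Has your network provider started?\n", confDir)
		return
	}
	fmt.Printf("CNI configs present: %v\n", found)
}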
event="NodeHasNoDiskPressure" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.249585 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.249604 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.249614 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:48Z","lastTransitionTime":"2026-01-21T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:48 crc kubenswrapper[4799]: E0121 17:33:48.262062 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:48Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.265401 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.265428 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.265439 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.265455 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.265465 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:48Z","lastTransitionTime":"2026-01-21T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:48 crc kubenswrapper[4799]: E0121 17:33:48.276848 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:48Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:48 crc kubenswrapper[4799]: E0121 17:33:48.277008 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.278324 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.278340 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.278347 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.278360 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.278370 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:48Z","lastTransitionTime":"2026-01-21T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.380826 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.380875 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.380885 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.380904 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.380915 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:48Z","lastTransitionTime":"2026-01-21T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.484270 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.484339 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.484352 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.484371 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.484381 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:48Z","lastTransitionTime":"2026-01-21T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.484270 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.484339 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.484352 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.484371 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.484381 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:48Z","lastTransitionTime":"2026-01-21T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.586705 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.586799 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.586815 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.586836 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.586874 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:48Z","lastTransitionTime":"2026-01-21T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.689974 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.690023 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.690032 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.690049 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.690062 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:48Z","lastTransitionTime":"2026-01-21T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
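Every NodeNotReady burst above carries the same root cause: the container runtime finds no CNI network configuration in /etc/kubernetes/cni/net.d/, so it keeps reporting NetworkReady=false until the network operator writes one. A simplified sketch of that readiness check follows; the directory path comes from the kubelet message, while the program and the extension set are only an approximation of the real lookup in libcni/ocicni, which differs in detail:

// cnicheck.go - simplified approximation of the check behind the
// recurring "no CNI configuration file" condition: the runtime needs
// at least one config file in the CNI conf dir before it reports
// NetworkReady=true.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // path from the kubelet message above
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", confDir, err)
		return
	}
	found := 0
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("CNI config:", filepath.Join(confDir, e.Name()))
			found++
		}
	}
	if found == 0 {
		// Matches the state in this log: the network operator has not
		// written its config yet, so the node stays NotReady.
		fmt.Println("no CNI configuration files found; network plugin not ready")
	}
}

On this node the sketch would take the empty branch until the network operator pods come up and write their config, at which point the Ready condition flips.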
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.793201 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.793252 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.793267 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.793288 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.793302 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:48Z","lastTransitionTime":"2026-01-21T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.893615 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 08:35:47.287085176 +0000 UTC
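The certificate_manager.go line above concerns a different certificate than the expired webhook one: the kubelet-serving certificate is still valid until 2026-02-24. The rotation deadline it prints changes on every pass (2025-11-15 here, 2025-11-10 and 2026-01-06 in later entries) because client-go's certificate manager re-derives the deadline with random jitter late in the certificate's validity window each time it checks. A simplified sketch of that idea; the 70-90% band and the NotBefore value are assumptions (the log shows neither), and the real certificate.Manager logic differs in detail:

// rotation.go - illustrative sketch of a jittered rotation deadline.
// Each call yields a different deadline, which is why consecutive
// certificate_manager lines in this log print different values.
package main

import (
	"fmt"
	"math/rand"
	"time"
)

func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	// Pick a uniformly random point between 70% and 90% of the lifetime
	// (illustrative band; client-go uses its own jitter constants).
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	// NotAfter taken from the log line above; NotBefore is an assumed
	// one-year lifetime since the log does not show the issuance time.
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)
	notBefore := notAfter.AddDate(-1, 0, 0)
	for i := 0; i < 3; i++ {
		fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter).Format(time.RFC3339))
	}
}

That recomputation is why checks seconds apart can print deadlines months apart without anything changing on disk.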
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.895292 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.895328 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.895350 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.895367 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.895378 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:48Z","lastTransitionTime":"2026-01-21T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.998447 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.998505 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.998516 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.998537 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:48 crc kubenswrapper[4799]: I0121 17:33:48.998562 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:48Z","lastTransitionTime":"2026-01-21T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.101789 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.101841 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.101856 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.101877 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.101889 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:49Z","lastTransitionTime":"2026-01-21T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.204010 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.204166 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.204189 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.204338 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999"
Jan 21 17:33:49 crc kubenswrapper[4799]: E0121 17:33:49.204330 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:33:49 crc kubenswrapper[4799]: E0121 17:33:49.204461 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:33:49 crc kubenswrapper[4799]: E0121 17:33:49.204561 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:33:49 crc kubenswrapper[4799]: E0121 17:33:49.204644 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.204954 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.204980 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.204989 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.205005 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.205014 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:49Z","lastTransitionTime":"2026-01-21T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.308062 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.308116 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.308178 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.308206 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.308223 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:49Z","lastTransitionTime":"2026-01-21T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.414275 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.414346 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.414358 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.414421 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.414443 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:49Z","lastTransitionTime":"2026-01-21T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.517164 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.517222 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.517234 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.517272 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.517307 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:49Z","lastTransitionTime":"2026-01-21T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.620607 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.620689 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.620703 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.620749 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.620762 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:49Z","lastTransitionTime":"2026-01-21T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.723577 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.723645 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.723656 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.723673 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.723684 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:49Z","lastTransitionTime":"2026-01-21T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.826970 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.827025 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.827036 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.827056 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.827070 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:49Z","lastTransitionTime":"2026-01-21T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.893865 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 15:42:46.30127038 +0000 UTC Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.929875 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.929946 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.929959 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.929982 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:49 crc kubenswrapper[4799]: I0121 17:33:49.929996 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:49Z","lastTransitionTime":"2026-01-21T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.032759 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.032815 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.032823 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.032842 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.032852 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:50Z","lastTransitionTime":"2026-01-21T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.134807 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.134850 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.134861 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.134880 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.134892 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:50Z","lastTransitionTime":"2026-01-21T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.238455 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.238516 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.238529 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.238553 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.238568 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:50Z","lastTransitionTime":"2026-01-21T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.341889 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.341958 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.341967 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.341987 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.341998 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:50Z","lastTransitionTime":"2026-01-21T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.446087 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.446199 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.446213 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.446237 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.446252 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:50Z","lastTransitionTime":"2026-01-21T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.550751 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.550904 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.550925 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.550970 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.550990 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:50Z","lastTransitionTime":"2026-01-21T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.654896 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.654976 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.654995 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.655021 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.655055 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:50Z","lastTransitionTime":"2026-01-21T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.758831 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.758959 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.759016 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.759073 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.759118 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:50Z","lastTransitionTime":"2026-01-21T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.861843 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.861903 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.861912 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.861945 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.861964 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:50Z","lastTransitionTime":"2026-01-21T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.895061 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 22:17:22.962707627 +0000 UTC Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.965764 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.965877 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.965910 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.965936 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:50 crc kubenswrapper[4799]: I0121 17:33:50.965971 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:50Z","lastTransitionTime":"2026-01-21T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.069042 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.069104 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.069118 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.069167 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.069180 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:51Z","lastTransitionTime":"2026-01-21T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.172341 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.172424 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.172439 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.172460 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.172474 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:51Z","lastTransitionTime":"2026-01-21T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.205178 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.205237 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.205234 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.205305 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:51 crc kubenswrapper[4799]: E0121 17:33:51.205459 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:33:51 crc kubenswrapper[4799]: E0121 17:33:51.205551 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:33:51 crc kubenswrapper[4799]: E0121 17:33:51.205692 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:33:51 crc kubenswrapper[4799]: E0121 17:33:51.205795 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.275483 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.275563 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.275580 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.275602 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.275615 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:51Z","lastTransitionTime":"2026-01-21T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.378705 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.378776 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.378788 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.378813 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.378829 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:51Z","lastTransitionTime":"2026-01-21T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.481548 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.481589 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.481598 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.481614 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.481625 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:51Z","lastTransitionTime":"2026-01-21T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.584012 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.584060 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.584069 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.584086 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.584096 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:51Z","lastTransitionTime":"2026-01-21T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.686731 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.686786 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.686801 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.686822 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.686848 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:51Z","lastTransitionTime":"2026-01-21T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.790064 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.790109 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.790120 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.790151 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.790165 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:51Z","lastTransitionTime":"2026-01-21T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.893171 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.893224 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.893236 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.893262 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.893273 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:51Z","lastTransitionTime":"2026-01-21T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.895609 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 06:24:50.149303656 +0000 UTC Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.995891 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.995961 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.995972 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.995992 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:51 crc kubenswrapper[4799]: I0121 17:33:51.996012 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:51Z","lastTransitionTime":"2026-01-21T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.098763 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.098818 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.098833 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.098852 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.098865 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:52Z","lastTransitionTime":"2026-01-21T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.200532 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.200578 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.200588 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.200610 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.200621 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:52Z","lastTransitionTime":"2026-01-21T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.303198 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.303247 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.303256 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.303273 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.303283 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:52Z","lastTransitionTime":"2026-01-21T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.406320 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.406386 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.406397 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.406418 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.406432 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:52Z","lastTransitionTime":"2026-01-21T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.509151 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.509200 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.509216 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.509238 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.509252 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:52Z","lastTransitionTime":"2026-01-21T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.612497 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.612560 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.612578 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.612596 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.612611 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:52Z","lastTransitionTime":"2026-01-21T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.715654 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.715713 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.715726 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.715745 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.715757 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:52Z","lastTransitionTime":"2026-01-21T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.819195 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.819249 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.819260 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.819279 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.819293 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:52Z","lastTransitionTime":"2026-01-21T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.895989 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 23:55:18.40668967 +0000 UTC Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.922693 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.922775 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.922787 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.922817 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:52 crc kubenswrapper[4799]: I0121 17:33:52.922830 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:52Z","lastTransitionTime":"2026-01-21T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.028770 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.028823 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.029074 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.029091 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.029102 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:53Z","lastTransitionTime":"2026-01-21T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.132795 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.132839 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.132849 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.132865 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.132874 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:53Z","lastTransitionTime":"2026-01-21T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.204845 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.204909 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.204956 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.204861 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:53 crc kubenswrapper[4799]: E0121 17:33:53.205147 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:33:53 crc kubenswrapper[4799]: E0121 17:33:53.205023 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:33:53 crc kubenswrapper[4799]: E0121 17:33:53.205674 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.205884 4799 scope.go:117] "RemoveContainer" containerID="93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d" Jan 21 17:33:53 crc kubenswrapper[4799]: E0121 17:33:53.205992 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.235465 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.235500 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.235509 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.235524 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.235533 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:53Z","lastTransitionTime":"2026-01-21T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.339315 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.339362 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.339373 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.339391 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.339403 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:53Z","lastTransitionTime":"2026-01-21T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.443535 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.443579 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.443588 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.443615 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.443626 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:53Z","lastTransitionTime":"2026-01-21T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.464551 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs\") pod \"network-metrics-daemon-7q999\" (UID: \"7796adba-b973-44ee-b0c4-c0df544250e3\") " pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:53 crc kubenswrapper[4799]: E0121 17:33:53.464771 4799 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:33:53 crc kubenswrapper[4799]: E0121 17:33:53.464892 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs podName:7796adba-b973-44ee-b0c4-c0df544250e3 nodeName:}" failed. No retries permitted until 2026-01-21 17:34:09.46486823 +0000 UTC m=+76.091158253 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs") pod "network-metrics-daemon-7q999" (UID: "7796adba-b973-44ee-b0c4-c0df544250e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.546517 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.546596 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.546607 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.546649 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.546661 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:53Z","lastTransitionTime":"2026-01-21T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.650084 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.650162 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.650178 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.650210 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.650224 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:53Z","lastTransitionTime":"2026-01-21T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.760031 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.760073 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.760081 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.760096 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.760105 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:53Z","lastTransitionTime":"2026-01-21T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.862836 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.862890 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.862899 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.862916 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.862926 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:53Z","lastTransitionTime":"2026-01-21T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.896373 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 14:25:43.484447799 +0000 UTC Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.965788 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.965832 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.965841 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.965858 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:53 crc kubenswrapper[4799]: I0121 17:33:53.965869 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:53Z","lastTransitionTime":"2026-01-21T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.068181 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.068239 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.068249 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.068269 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.068279 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:54Z","lastTransitionTime":"2026-01-21T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.171229 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.171319 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.171345 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.171381 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.171419 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:54Z","lastTransitionTime":"2026-01-21T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.219509 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovnkube-controller/1.log" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.224908 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerStarted","Data":"32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9"} Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.236740 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.258954 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.274236 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.274540 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.274608 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.274709 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.274780 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:54Z","lastTransitionTime":"2026-01-21T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.283475 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.309662 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.478541 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.478582 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.478594 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.478613 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.478625 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:54Z","lastTransitionTime":"2026-01-21T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.481157 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.495040 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.514432 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.531931 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.580845 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.580882 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.580893 4799 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.580909 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.580918 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:54Z","lastTransitionTime":"2026-01-21T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.632668 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.644761 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.660938 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.675510 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1185e548-340d-42f6-b6e5-8b4826a43153\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5076079801d58a08432af79949fb03da7b9445180a950bdf1310638edfa8b95d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43e360aa041ebc4c926c01dc20eb1920c9e121b239f76c6f312d4928019f38c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ff097f5dbab0def600436177a09f212347b705be6b75949d9646a79ab4e2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.686148 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.686199 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.686212 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.686229 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.686243 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:54Z","lastTransitionTime":"2026-01-21T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.690878 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.705100 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.720187 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.731958 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.754800 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:41Z\\\",\\\"message\\\":\\\"etry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168620 6245 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI0121 17:33:41.168674 6245 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0121 17:33:41.168671 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85\\\\nI0121 17:33:41.168681 6245 lb_config.go:1031] Cluster endpoints for openshift-authentication-operator/metrics for network=default are: map[]\\\\nI0121 17:33:41.168690 6245 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85 in node crc\\\\nI0121 17:33:41.168678 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168693 6245 services_controller.go:443] Built service openshift-authentication-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.150\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.774808 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.789299 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.789345 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.789356 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.789374 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.789386 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:54Z","lastTransitionTime":"2026-01-21T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.791349 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.806576 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.819051 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 
17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.834986 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.849160 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.861537 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.867836 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.880466 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.892864 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.892919 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.892932 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.892954 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.892968 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:54Z","lastTransitionTime":"2026-01-21T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.897345 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 23:10:55.68096542 +0000 UTC Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.903347 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\
"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"
cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:41Z\\\",\\\"message\\\":\\\"etry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168620 6245 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI0121 17:33:41.168674 6245 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0121 17:33:41.168671 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85\\\\nI0121 17:33:41.168681 6245 lb_config.go:1031] Cluster endpoints for openshift-authentication-operator/metrics for network=default are: map[]\\\\nI0121 17:33:41.168690 6245 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85 in node crc\\\\nI0121 17:33:41.168678 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168693 6245 services_controller.go:443] Built service openshift-authentication-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.150\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, 
internalTrafficLo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuse
s\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.915229 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.932991 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1185e548-340d-42f6-b6e5-8b4826a43153\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5076079801d58a08432af79949fb03da7b9445180a950bdf1310638edfa8b95d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43e360aa041ebc4c926c01dc20eb1920c9e121b239f76c6f312d4928019f38c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ff097f5dbab0def600436177a09f212347b705be6b75949d9646a79ab4e2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.947649 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 
17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.961911 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.974035 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.987155 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.995234 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.995287 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.995300 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.995321 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:54 crc kubenswrapper[4799]: I0121 17:33:54.995333 4799 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:54Z","lastTransitionTime":"2026-01-21T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.001454 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:54Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.015570 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.027049 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.055182 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.067809 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 
17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.082303 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.113459 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1185e548-340d-42f6-b6e5-8b4826a43153\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5076079801d58a08432af79949fb03da7b9445180a950bdf1310638edfa8b95d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43e360aa041ebc4c926c01dc20eb1920c9e121b239f76c6f312d4928019f38c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ff097f5dbab0def600436177a09f212347b705be6b75949d9646a79ab4e2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.126730 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 
17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.142436 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.157884 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.158201 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.158274 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.158356 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.158431 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:55Z","lastTransitionTime":"2026-01-21T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.159769 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.172347 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.193002 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9
d148f0e8802b05d659e064d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:41Z\\\",\\\"message\\\":\\\"etry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168620 6245 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI0121 17:33:41.168674 6245 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0121 17:33:41.168671 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85\\\\nI0121 17:33:41.168681 6245 lb_config.go:1031] Cluster endpoints for openshift-authentication-operator/metrics for network=default are: map[]\\\\nI0121 17:33:41.168690 6245 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85 in node crc\\\\nI0121 17:33:41.168678 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168693 6245 services_controller.go:443] Built service openshift-authentication-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.150\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, 
internalTrafficLo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuse
s\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.208577 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.208930 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.209084 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.208970 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:55 crc kubenswrapper[4799]: E0121 17:33:55.209387 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:33:55 crc kubenswrapper[4799]: E0121 17:33:55.209512 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:33:55 crc kubenswrapper[4799]: E0121 17:33:55.209758 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:33:55 crc kubenswrapper[4799]: E0121 17:33:55.209828 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.216044 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.229498 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.244112 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.255992 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.261011 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.261153 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.261227 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.261338 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.261407 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:55Z","lastTransitionTime":"2026-01-21T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.269800 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.280569 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.296113 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:55Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.363931 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.363970 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:55 crc 
kubenswrapper[4799]: I0121 17:33:55.363980 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.363995 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.364006 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:55Z","lastTransitionTime":"2026-01-21T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.466524 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.466565 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.466573 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.466591 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.466601 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:55Z","lastTransitionTime":"2026-01-21T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.569527 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.569574 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.569583 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.569599 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.569608 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:55Z","lastTransitionTime":"2026-01-21T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.672275 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.672338 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.672352 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.672375 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.672389 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:55Z","lastTransitionTime":"2026-01-21T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.775580 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.775632 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.775642 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.775660 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.775670 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:55Z","lastTransitionTime":"2026-01-21T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.878298 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.878374 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.878388 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.878413 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.878440 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:55Z","lastTransitionTime":"2026-01-21T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.897658 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 23:57:01.322149602 +0000 UTC
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.980986 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.981587 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.981693 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.981783 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:55 crc kubenswrapper[4799]: I0121 17:33:55.981897 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:55Z","lastTransitionTime":"2026-01-21T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.022485 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg"
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.085280 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.085608 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.085687 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.085764 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.085873 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:56Z","lastTransitionTime":"2026-01-21T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.189076 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.189146 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.189160 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.189182 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.189196 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:56Z","lastTransitionTime":"2026-01-21T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.238293 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovnkube-controller/2.log"
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.239109 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovnkube-controller/1.log"
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.243158 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerDied","Data":"32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9"}
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.243151 4799 generic.go:334] "Generic (PLEG): container finished" podID="6770819e-2fef-4203-9c5f-504628af7b66" containerID="32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9" exitCode=1
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.243225 4799 scope.go:117] "RemoveContainer" containerID="93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d"
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.244254 4799 scope.go:117] "RemoveContainer" containerID="32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9"
Jan 21 17:33:56 crc kubenswrapper[4799]: E0121 17:33:56.244504 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66"
Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.261401 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.277086 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.290358 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.291460 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.291485 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.291493 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.291508 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.291517 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:56Z","lastTransitionTime":"2026-01-21T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.304147 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.318456 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.333621 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.345931 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.358400 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.372115 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disab
led\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.383938 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.393903 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.393960 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.393974 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.393997 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.394007 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:56Z","lastTransitionTime":"2026-01-21T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.395619 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.407441 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.467608 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:41Z\\\",\\\"message\\\":\\\"etry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168620 6245 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI0121 17:33:41.168674 6245 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0121 17:33:41.168671 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85\\\\nI0121 17:33:41.168681 6245 lb_config.go:1031] Cluster endpoints for openshift-authentication-operator/metrics for network=default are: map[]\\\\nI0121 17:33:41.168690 6245 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85 in node crc\\\\nI0121 17:33:41.168678 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168693 6245 services_controller.go:443] Built service openshift-authentication-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.150\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:56Z\\\",\\\"message\\\":\\\"ePort:false}, 
services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:53, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9154, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0121 17:33:55.203455 6420 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-6qqjg in node crc\\\\nI0121 17:33:55.203456 6420 services_controller.go:445] Built service openshift-dns/dns-default LB template configs for network=default: []services.lbConfig(nil)\\\\nF0121 17:33:55.203462 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7
c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.479356 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.490244 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1185e548-340d-42f6-b6e5-8b4826a43153\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5076079801d58a08432af79949fb03da7b9445180a950bdf1310638edfa8b95d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43e360aa041ebc4c926c01dc20eb1920c9e121b239f76c6f312d4928019f38c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ff097f5dbab0def600436177a09f212347b705be6b75949d9646a79ab4e2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.497332 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.497386 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.497399 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.497418 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.497430 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:56Z","lastTransitionTime":"2026-01-21T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.504079 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.521816 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:56Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.600840 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.600881 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.600894 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.600914 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.600929 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:56Z","lastTransitionTime":"2026-01-21T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.703030 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.703082 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.703095 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.703113 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.703141 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:56Z","lastTransitionTime":"2026-01-21T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.806168 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.806243 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.806265 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.806288 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.806307 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:56Z","lastTransitionTime":"2026-01-21T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.898020 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 04:06:34.936583262 +0000 UTC Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.908815 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.908855 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.908864 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.908880 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:56 crc kubenswrapper[4799]: I0121 17:33:56.908890 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:56Z","lastTransitionTime":"2026-01-21T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.011742 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.011798 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.011810 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.011831 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.011842 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:57Z","lastTransitionTime":"2026-01-21T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.114882 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.114934 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.114948 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.114970 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.114984 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:57Z","lastTransitionTime":"2026-01-21T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.204180 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.204218 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.204215 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.204180 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.204394 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.204558 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.204621 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.204715 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.214462 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.214645 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:34:29.214619132 +0000 UTC m=+95.840909155 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.218065 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.218151 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.218166 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.218186 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.218200 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:57Z","lastTransitionTime":"2026-01-21T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.249731 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovnkube-controller/2.log" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.315254 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.315306 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.315471 4799 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.315471 4799 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.315540 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2026-01-21 17:34:29.315519279 +0000 UTC m=+95.941809302 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.315583 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:34:29.3155567 +0000 UTC m=+95.941846793 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.321342 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.321376 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.321386 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.321403 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.321412 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:57Z","lastTransitionTime":"2026-01-21T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.424431 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.424492 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.424507 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.424529 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.424546 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:57Z","lastTransitionTime":"2026-01-21T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.517077 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.517184 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.517281 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.517306 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.517320 4799 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.517380 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-21 17:34:29.517364063 +0000 UTC m=+96.143654086 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.517281 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.517634 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.517667 4799 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:57 crc kubenswrapper[4799]: E0121 17:33:57.517802 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-21 17:34:29.517770385 +0000 UTC m=+96.144060398 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.527103 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.527174 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.527185 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.527206 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.527226 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:57Z","lastTransitionTime":"2026-01-21T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.630614 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.630652 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.630660 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.630674 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.630684 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:57Z","lastTransitionTime":"2026-01-21T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.733651 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.733702 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.733715 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.733733 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.733743 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:57Z","lastTransitionTime":"2026-01-21T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.839594 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.839942 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.840401 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.840425 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.840435 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:57Z","lastTransitionTime":"2026-01-21T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.898954 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 03:43:08.309316243 +0000 UTC Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.943780 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.943866 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.943893 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.943930 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:57 crc kubenswrapper[4799]: I0121 17:33:57.943953 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:57Z","lastTransitionTime":"2026-01-21T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.046801 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.046842 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.046853 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.046871 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.046884 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:58Z","lastTransitionTime":"2026-01-21T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.149767 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.149825 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.149837 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.149858 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.149870 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:58Z","lastTransitionTime":"2026-01-21T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.253323 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.253374 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.253388 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.253407 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.253420 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:58Z","lastTransitionTime":"2026-01-21T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.356825 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.356887 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.356902 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.356923 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.356935 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:58Z","lastTransitionTime":"2026-01-21T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.476416 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.476459 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.476472 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.476494 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.476514 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:58Z","lastTransitionTime":"2026-01-21T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.531010 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.531059 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.531070 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.531087 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.531099 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:58Z","lastTransitionTime":"2026-01-21T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:58 crc kubenswrapper[4799]: E0121 17:33:58.547524 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:58Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.552628 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.552679 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.552692 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.552707 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.552717 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:58Z","lastTransitionTime":"2026-01-21T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:58 crc kubenswrapper[4799]: E0121 17:33:58.565695 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:58Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.570302 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.570342 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.570350 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.570368 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.570378 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:58Z","lastTransitionTime":"2026-01-21T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:58 crc kubenswrapper[4799]: E0121 17:33:58.583775 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:58Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.588400 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.588439 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.588449 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.588471 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.588484 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:58Z","lastTransitionTime":"2026-01-21T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:58 crc kubenswrapper[4799]: E0121 17:33:58.602122 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:58Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.606573 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.606607 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.606618 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.606634 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.606648 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:58Z","lastTransitionTime":"2026-01-21T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:58 crc kubenswrapper[4799]: E0121 17:33:58.620396 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:33:58Z is after 2025-08-24T17:21:41Z" Jan 21 17:33:58 crc kubenswrapper[4799]: E0121 17:33:58.620514 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.622439 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.622485 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.622494 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.622530 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.622542 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:58Z","lastTransitionTime":"2026-01-21T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.725227 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.725280 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.725289 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.725303 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.725313 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:58Z","lastTransitionTime":"2026-01-21T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.827782 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.827845 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.827858 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.827879 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.827892 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:58Z","lastTransitionTime":"2026-01-21T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.899648 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 12:38:47.595318733 +0000 UTC Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.930968 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.931012 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.931021 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.931037 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:58 crc kubenswrapper[4799]: I0121 17:33:58.931046 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:58Z","lastTransitionTime":"2026-01-21T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.034512 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.034615 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.034675 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.034729 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.034774 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:59Z","lastTransitionTime":"2026-01-21T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.137614 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.137663 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.137676 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.137694 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.137708 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:59Z","lastTransitionTime":"2026-01-21T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.204826 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.204898 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.204847 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:33:59 crc kubenswrapper[4799]: E0121 17:33:59.204976 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.204826 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:33:59 crc kubenswrapper[4799]: E0121 17:33:59.205217 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:33:59 crc kubenswrapper[4799]: E0121 17:33:59.205257 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:33:59 crc kubenswrapper[4799]: E0121 17:33:59.205352 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.240545 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.240582 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.240593 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.240627 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.240638 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:59Z","lastTransitionTime":"2026-01-21T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.343498 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.343535 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.343546 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.343566 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.343577 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:59Z","lastTransitionTime":"2026-01-21T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.451178 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.451243 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.451283 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.451317 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.451338 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:59Z","lastTransitionTime":"2026-01-21T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.554773 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.554829 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.554841 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.554861 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.554877 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:59Z","lastTransitionTime":"2026-01-21T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.659011 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.659429 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.659439 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.659458 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.659469 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:59Z","lastTransitionTime":"2026-01-21T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.761764 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.761811 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.761821 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.761841 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.761852 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:59Z","lastTransitionTime":"2026-01-21T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.865318 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.865380 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.865394 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.865418 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.865440 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:59Z","lastTransitionTime":"2026-01-21T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.900576 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 08:04:07.327718597 +0000 UTC Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.968489 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.968525 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.968539 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.968558 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:33:59 crc kubenswrapper[4799]: I0121 17:33:59.968572 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:33:59Z","lastTransitionTime":"2026-01-21T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.295365 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.295407 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.295419 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.295438 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.295452 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:00Z","lastTransitionTime":"2026-01-21T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.398413 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.398450 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.398460 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.398476 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.398485 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:00Z","lastTransitionTime":"2026-01-21T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.502322 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.502370 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.502381 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.502405 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.502416 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:00Z","lastTransitionTime":"2026-01-21T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.605581 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.605641 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.605656 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.605679 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.605692 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:00Z","lastTransitionTime":"2026-01-21T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.708304 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.708349 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.708364 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.708382 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.708395 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:00Z","lastTransitionTime":"2026-01-21T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.810831 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.810867 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.810876 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.810894 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.810903 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:00Z","lastTransitionTime":"2026-01-21T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.901449 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 04:37:10.652687984 +0000 UTC Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.913995 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.914037 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.914046 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.914064 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:00 crc kubenswrapper[4799]: I0121 17:34:00.914075 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:00Z","lastTransitionTime":"2026-01-21T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.017479 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.017543 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.017560 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.017587 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.017604 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:01Z","lastTransitionTime":"2026-01-21T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.120662 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.120714 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.120726 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.120748 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.120760 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:01Z","lastTransitionTime":"2026-01-21T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.204595 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.204648 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.204661 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.204632 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:01 crc kubenswrapper[4799]: E0121 17:34:01.204796 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:01 crc kubenswrapper[4799]: E0121 17:34:01.204917 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:01 crc kubenswrapper[4799]: E0121 17:34:01.205014 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:01 crc kubenswrapper[4799]: E0121 17:34:01.205087 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.223466 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.223544 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.223554 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.223577 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.223589 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:01Z","lastTransitionTime":"2026-01-21T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.326279 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.326329 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.326344 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.326360 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.326372 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:01Z","lastTransitionTime":"2026-01-21T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.428790 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.428859 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.428873 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.428892 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.428951 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:01Z","lastTransitionTime":"2026-01-21T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.531195 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.531234 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.531243 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.531259 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.531272 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:01Z","lastTransitionTime":"2026-01-21T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.634254 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.634319 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.634330 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.634374 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.634386 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:01Z","lastTransitionTime":"2026-01-21T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.737833 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.737882 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.737894 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.737916 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.737932 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:01Z","lastTransitionTime":"2026-01-21T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.841663 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.841709 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.841721 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.841742 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.841761 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:01Z","lastTransitionTime":"2026-01-21T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.901922 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 22:49:20.615177728 +0000 UTC Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.945232 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.945281 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.945292 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.945311 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:01 crc kubenswrapper[4799]: I0121 17:34:01.945322 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:01Z","lastTransitionTime":"2026-01-21T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.054293 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.054367 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.054380 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.054400 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.054413 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:02Z","lastTransitionTime":"2026-01-21T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.157312 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.157371 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.157384 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.157406 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.157421 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:02Z","lastTransitionTime":"2026-01-21T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.264625 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.264687 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.264709 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.264742 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.264761 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:02Z","lastTransitionTime":"2026-01-21T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.367829 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.367903 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.367915 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.367937 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.367949 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:02Z","lastTransitionTime":"2026-01-21T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.470627 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.470673 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.470698 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.470715 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.470725 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:02Z","lastTransitionTime":"2026-01-21T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.574608 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.574682 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.574694 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.574714 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.574729 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:02Z","lastTransitionTime":"2026-01-21T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.678496 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.678552 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.678564 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.678587 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.678597 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:02Z","lastTransitionTime":"2026-01-21T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.781507 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.781558 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.781567 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.781587 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.781597 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:02Z","lastTransitionTime":"2026-01-21T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.884747 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.884836 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.884857 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.884887 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.884908 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:02Z","lastTransitionTime":"2026-01-21T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.902326 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 01:36:56.535314711 +0000 UTC
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.988063 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.988106 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.988117 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.988156 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:02 crc kubenswrapper[4799]: I0121 17:34:02.988169 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:02Z","lastTransitionTime":"2026-01-21T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.091574 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.091625 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.091638 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.091659 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.091671 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:03Z","lastTransitionTime":"2026-01-21T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.194147 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.194204 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.194219 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.194237 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.194251 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:03Z","lastTransitionTime":"2026-01-21T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.204639 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.204649 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.204731 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.204790 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999"
Jan 21 17:34:03 crc kubenswrapper[4799]: E0121 17:34:03.204796 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:34:03 crc kubenswrapper[4799]: E0121 17:34:03.204906 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:34:03 crc kubenswrapper[4799]: E0121 17:34:03.204967 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:34:03 crc kubenswrapper[4799]: E0121 17:34:03.205048 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.297646 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.297706 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.297715 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.297733 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.297744 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:03Z","lastTransitionTime":"2026-01-21T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.435551 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.435615 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.435627 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.435650 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.435663 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:03Z","lastTransitionTime":"2026-01-21T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.538182 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.538217 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.538227 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.538242 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.538253 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:03Z","lastTransitionTime":"2026-01-21T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.640806 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.640840 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.640849 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.640865 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.640874 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:03Z","lastTransitionTime":"2026-01-21T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.743089 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.743183 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.743196 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.743217 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.743227 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:03Z","lastTransitionTime":"2026-01-21T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.846162 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.846290 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.846304 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.846327 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.846346 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:03Z","lastTransitionTime":"2026-01-21T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.903494 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 11:40:06.287653707 +0000 UTC
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.950574 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.950650 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.950665 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.950686 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:03 crc kubenswrapper[4799]: I0121 17:34:03.950701 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:03Z","lastTransitionTime":"2026-01-21T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.053293 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.053342 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.053355 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.053374 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.053385 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:04Z","lastTransitionTime":"2026-01-21T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.156174 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.156233 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.156243 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.156265 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.156279 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:04Z","lastTransitionTime":"2026-01-21T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.221289 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.234348 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 
17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.250065 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.258685 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.258745 4799 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.258756 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.258775 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.258789 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:04Z","lastTransitionTime":"2026-01-21T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.265041 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"
mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.278490 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.289328 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.316768 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9
d148f0e8802b05d659e064d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba757b8138fb912019e978af71750fc672bb88c39af2f4de59a493de75782d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:41Z\\\",\\\"message\\\":\\\"etry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168620 6245 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI0121 17:33:41.168674 6245 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI0121 17:33:41.168671 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85\\\\nI0121 17:33:41.168681 6245 lb_config.go:1031] Cluster endpoints for openshift-authentication-operator/metrics for network=default are: map[]\\\\nI0121 17:33:41.168690 6245 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85 in node crc\\\\nI0121 17:33:41.168678 6245 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-bckxf\\\\nI0121 17:33:41.168693 6245 services_controller.go:443] Built service openshift-authentication-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.150\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:56Z\\\",\\\"message\\\":\\\"ePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:53, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9154, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0121 17:33:55.203455 6420 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-6qqjg in node crc\\\\nI0121 17:33:55.203456 6420 services_controller.go:445] Built service openshift-dns/dns-default LB template configs for network=default: []services.lbConfig(nil)\\\\nF0121 17:33:55.203462 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during 
admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\
\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.330556 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.347499 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1185e548-340d-42f6-b6e5-8b4826a43153\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5076079801d58a08432af79949fb03da7b9445180a950bdf1310638edfa8b95d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43e360aa041ebc4c926c01dc20eb1920c9e121b239f76c6f312d4928019f38c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ff097f5dbab0def600436177a09f212347b705be6b75949d9646a79ab4e2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.362689 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.362731 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.362745 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.362765 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.362777 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:04Z","lastTransitionTime":"2026-01-21T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.363656 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.381376 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.395099 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.411923 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.425667 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.436175 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.454099 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.467044 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.467395 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:04 crc 
kubenswrapper[4799]: I0121 17:34:04.467521 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.467633 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.467749 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:04Z","lastTransitionTime":"2026-01-21T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.469241 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:04Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.570557 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.571035 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.571201 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.571329 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.571450 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:04Z","lastTransitionTime":"2026-01-21T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.859999 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.860036 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.860045 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.860062 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.860072 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:04Z","lastTransitionTime":"2026-01-21T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.903785 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 07:45:52.346424895 +0000 UTC
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.962266 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.962324 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.962335 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.962352 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:04 crc kubenswrapper[4799]: I0121 17:34:04.962364 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:04Z","lastTransitionTime":"2026-01-21T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.064974 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.065030 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.065041 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.065059 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.065070 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:05Z","lastTransitionTime":"2026-01-21T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.167879 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.168255 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.168322 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.168389 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.168456 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:05Z","lastTransitionTime":"2026-01-21T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.204280 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.204287 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.204301 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.204510 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:34:05 crc kubenswrapper[4799]: E0121 17:34:05.204694 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:34:05 crc kubenswrapper[4799]: E0121 17:34:05.204888 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:34:05 crc kubenswrapper[4799]: E0121 17:34:05.204973 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3"
Jan 21 17:34:05 crc kubenswrapper[4799]: E0121 17:34:05.205019 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.271763 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.271800 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.271811 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.271828 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.271840 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:05Z","lastTransitionTime":"2026-01-21T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.385714 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.385751 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.385760 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.385777 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.385788 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:05Z","lastTransitionTime":"2026-01-21T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.489076 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.489113 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.489141 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.489159 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.489172 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:05Z","lastTransitionTime":"2026-01-21T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.592532 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.592585 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.592596 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.592629 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.592650 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:05Z","lastTransitionTime":"2026-01-21T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.695015 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.695057 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.695068 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.695086 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.695097 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:05Z","lastTransitionTime":"2026-01-21T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.798012 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.798589 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.798699 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.798835 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.798944 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:05Z","lastTransitionTime":"2026-01-21T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.901748 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.901808 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.901819 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.901838 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.901848 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:05Z","lastTransitionTime":"2026-01-21T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:05 crc kubenswrapper[4799]: I0121 17:34:05.904030 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 18:30:00.963103706 +0000 UTC Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.005402 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.005443 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.005454 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.005472 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.005482 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:06Z","lastTransitionTime":"2026-01-21T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.108499 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.108534 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.108542 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.108561 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.108573 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:06Z","lastTransitionTime":"2026-01-21T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.210580 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.210622 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.210633 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.210645 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.210655 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:06Z","lastTransitionTime":"2026-01-21T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.313194 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.313271 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.313281 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.313302 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.313316 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:06Z","lastTransitionTime":"2026-01-21T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.415396 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.415744 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.415809 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.415881 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.415943 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:06Z","lastTransitionTime":"2026-01-21T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.519170 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.519219 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.519230 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.519249 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.519260 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:06Z","lastTransitionTime":"2026-01-21T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.622070 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.622115 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.622155 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.622173 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.622182 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:06Z","lastTransitionTime":"2026-01-21T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.724552 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.724595 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.724606 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.724626 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.724638 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:06Z","lastTransitionTime":"2026-01-21T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.826849 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.826887 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.826898 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.826919 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.826931 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:06Z","lastTransitionTime":"2026-01-21T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.905192 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 07:58:36.821062238 +0000 UTC Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.930168 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.930234 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.930246 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.930270 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:06 crc kubenswrapper[4799]: I0121 17:34:06.930283 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:06Z","lastTransitionTime":"2026-01-21T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.034096 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.034158 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.034168 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.034187 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.034197 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:07Z","lastTransitionTime":"2026-01-21T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.136712 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.136779 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.136788 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.136809 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.136821 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:07Z","lastTransitionTime":"2026-01-21T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.204336 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.204377 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.204341 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.204364 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:07 crc kubenswrapper[4799]: E0121 17:34:07.204511 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:07 crc kubenswrapper[4799]: E0121 17:34:07.204569 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:07 crc kubenswrapper[4799]: E0121 17:34:07.204660 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:07 crc kubenswrapper[4799]: E0121 17:34:07.204729 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.239630 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.239721 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.239740 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.239768 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.239791 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:07Z","lastTransitionTime":"2026-01-21T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.342177 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.342224 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.342236 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.342254 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.342266 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:07Z","lastTransitionTime":"2026-01-21T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.445571 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.445614 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.445631 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.445651 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.445663 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:07Z","lastTransitionTime":"2026-01-21T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.548389 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.548436 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.548450 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.548469 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.548482 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:07Z","lastTransitionTime":"2026-01-21T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.650893 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.650960 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.650970 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.650987 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.650999 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:07Z","lastTransitionTime":"2026-01-21T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.753671 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.753728 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.753746 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.753765 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.753776 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:07Z","lastTransitionTime":"2026-01-21T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.856107 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.856184 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.856195 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.856214 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.856225 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:07Z","lastTransitionTime":"2026-01-21T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.906251 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 23:23:20.871796144 +0000 UTC Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.959406 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.959459 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.959470 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.959489 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:07 crc kubenswrapper[4799]: I0121 17:34:07.959502 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:07Z","lastTransitionTime":"2026-01-21T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.062408 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.062461 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.062471 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.062490 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.062500 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:08Z","lastTransitionTime":"2026-01-21T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.164624 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.164666 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.164675 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.164693 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.164703 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:08Z","lastTransitionTime":"2026-01-21T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.267777 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.267831 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.267842 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.267863 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.267883 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:08Z","lastTransitionTime":"2026-01-21T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.370621 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.370680 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.370694 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.370710 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.370728 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:08Z","lastTransitionTime":"2026-01-21T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.473467 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.473530 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.473540 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.473559 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.473571 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:08Z","lastTransitionTime":"2026-01-21T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.578774 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.578832 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.578843 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.578860 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.578871 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:08Z","lastTransitionTime":"2026-01-21T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.682292 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.682341 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.682351 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.682368 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.682380 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:08Z","lastTransitionTime":"2026-01-21T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.784920 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.784999 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.785017 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.785035 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.785066 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:08Z","lastTransitionTime":"2026-01-21T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.856598 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.856649 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.856659 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.856676 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.856688 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:08Z","lastTransitionTime":"2026-01-21T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:08 crc kubenswrapper[4799]: E0121 17:34:08.872825 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:08Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.878307 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.878360 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.878371 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.878392 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.878406 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:08Z","lastTransitionTime":"2026-01-21T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:08 crc kubenswrapper[4799]: E0121 17:34:08.894431 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:08Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.900482 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.900538 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.900549 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.900639 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.900668 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:08Z","lastTransitionTime":"2026-01-21T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.906352 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 09:33:08.55891321 +0000 UTC Jan 21 17:34:08 crc kubenswrapper[4799]: E0121 17:34:08.916514 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:08Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.921511 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.921645 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.921682 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.921727 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.921739 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:08Z","lastTransitionTime":"2026-01-21T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:08 crc kubenswrapper[4799]: E0121 17:34:08.935656 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:08Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.941663 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.941734 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.941750 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.941780 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.941795 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:08Z","lastTransitionTime":"2026-01-21T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:08 crc kubenswrapper[4799]: E0121 17:34:08.958107 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:08Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:08 crc kubenswrapper[4799]: E0121 17:34:08.958271 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.960629 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.960674 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.960687 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.960713 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:08 crc kubenswrapper[4799]: I0121 17:34:08.960728 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:08Z","lastTransitionTime":"2026-01-21T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.064102 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.064185 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.064201 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.064221 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.064235 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:09Z","lastTransitionTime":"2026-01-21T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.167735 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.167792 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.167807 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.167829 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.167845 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:09Z","lastTransitionTime":"2026-01-21T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.204315 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.204350 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.204383 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.204433 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:09 crc kubenswrapper[4799]: E0121 17:34:09.204484 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:09 crc kubenswrapper[4799]: E0121 17:34:09.204647 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:09 crc kubenswrapper[4799]: E0121 17:34:09.204712 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:09 crc kubenswrapper[4799]: E0121 17:34:09.204809 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.270811 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.270858 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.270870 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.270892 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.270908 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:09Z","lastTransitionTime":"2026-01-21T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.374188 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.374225 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.374236 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.374253 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.374263 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:09Z","lastTransitionTime":"2026-01-21T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.477565 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.477634 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.477647 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.477670 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.477683 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:09Z","lastTransitionTime":"2026-01-21T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.519395 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs\") pod \"network-metrics-daemon-7q999\" (UID: \"7796adba-b973-44ee-b0c4-c0df544250e3\") " pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:09 crc kubenswrapper[4799]: E0121 17:34:09.519661 4799 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:34:09 crc kubenswrapper[4799]: E0121 17:34:09.519795 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs podName:7796adba-b973-44ee-b0c4-c0df544250e3 nodeName:}" failed. No retries permitted until 2026-01-21 17:34:41.519768221 +0000 UTC m=+108.146058444 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs") pod "network-metrics-daemon-7q999" (UID: "7796adba-b973-44ee-b0c4-c0df544250e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.580972 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.581447 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.581602 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.581713 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.581895 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:09Z","lastTransitionTime":"2026-01-21T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.684895 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.684937 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.684947 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.684963 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.684974 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:09Z","lastTransitionTime":"2026-01-21T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.787974 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.788032 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.788048 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.788074 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.788094 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:09Z","lastTransitionTime":"2026-01-21T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.890873 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.890920 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.890931 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.890955 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.890970 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:09Z","lastTransitionTime":"2026-01-21T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.907461 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 01:18:29.676601207 +0000 UTC Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.994107 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.994169 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.994179 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.994199 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:09 crc kubenswrapper[4799]: I0121 17:34:09.994210 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:09Z","lastTransitionTime":"2026-01-21T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.096628 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.096678 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.096688 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.096705 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.096735 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:10Z","lastTransitionTime":"2026-01-21T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.200213 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.200324 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.200352 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.200385 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.200408 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:10Z","lastTransitionTime":"2026-01-21T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.302874 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.302908 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.302918 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.302933 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.302943 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:10Z","lastTransitionTime":"2026-01-21T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.405523 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.405581 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.405594 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.405612 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.405624 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:10Z","lastTransitionTime":"2026-01-21T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.508028 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.508080 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.508093 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.508112 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.508152 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:10Z","lastTransitionTime":"2026-01-21T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.611399 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.611465 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.611479 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.611508 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.611537 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:10Z","lastTransitionTime":"2026-01-21T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.714575 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.714627 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.714638 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.714662 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.714681 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:10Z","lastTransitionTime":"2026-01-21T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.817802 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.817853 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.817864 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.817887 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.817898 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:10Z","lastTransitionTime":"2026-01-21T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.908432 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 10:16:26.600165458 +0000 UTC Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.921190 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.921283 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.921296 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.921314 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:10 crc kubenswrapper[4799]: I0121 17:34:10.921325 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:10Z","lastTransitionTime":"2026-01-21T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.024169 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.024244 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.024263 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.024286 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.024297 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:11Z","lastTransitionTime":"2026-01-21T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.127034 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.127084 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.127094 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.127111 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.127139 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:11Z","lastTransitionTime":"2026-01-21T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.204998 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.205095 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.205026 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.205314 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:11 crc kubenswrapper[4799]: E0121 17:34:11.205406 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:11 crc kubenswrapper[4799]: E0121 17:34:11.205629 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:11 crc kubenswrapper[4799]: E0121 17:34:11.205671 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:11 crc kubenswrapper[4799]: E0121 17:34:11.205857 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.206901 4799 scope.go:117] "RemoveContainer" containerID="32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9" Jan 21 17:34:11 crc kubenswrapper[4799]: E0121 17:34:11.207214 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.221387 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1185e548-340d-42f6-b6e5-8b4826a43153\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5076079801d58a08432af79949fb03da7b9445180a950bdf1310638edfa8b95d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43e360aa041ebc4c926c01dc20eb1920c9e121b239f76c6f312d4928019f38c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ff097f5dbab0def600436177a09f212347b705be6b75949d9646a79ab4e2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.230102 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.230158 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.230176 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.230196 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.230210 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:11Z","lastTransitionTime":"2026-01-21T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.234049 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.249904 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.267118 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.279292 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.302400 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:56Z\\\",\\\"message\\\":\\\"ePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:53, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9154, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0121 17:33:55.203455 6420 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-6qqjg in node crc\\\\nI0121 17:33:55.203456 6420 services_controller.go:445] Built service openshift-dns/dns-default LB template configs for network=default: []services.lbConfig(nil)\\\\nF0121 17:33:55.203462 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.314350 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.333623 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.333684 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.333696 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.333715 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.333729 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:11Z","lastTransitionTime":"2026-01-21T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.335168 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.352756 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.370945 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.383211 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.399481 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.413420 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.430634 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.436160 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.436221 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.436235 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.436257 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.436272 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:11Z","lastTransitionTime":"2026-01-21T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.447445 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc
32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.464360 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.479237 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:11Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.538652 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.538714 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.538724 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.538744 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.538755 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:11Z","lastTransitionTime":"2026-01-21T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.641263 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.641306 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.641316 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.641331 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.641343 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:11Z","lastTransitionTime":"2026-01-21T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.747121 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.747184 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.747194 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.747212 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.747222 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:11Z","lastTransitionTime":"2026-01-21T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.850576 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.850624 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.850635 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.850657 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.850668 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:11Z","lastTransitionTime":"2026-01-21T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.909460 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 01:08:26.75580033 +0000 UTC Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.953608 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.953699 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.953742 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.953765 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:11 crc kubenswrapper[4799]: I0121 17:34:11.953780 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:11Z","lastTransitionTime":"2026-01-21T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.056721 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.056770 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.056796 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.056813 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.056826 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:12Z","lastTransitionTime":"2026-01-21T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.159485 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.159534 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.159545 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.159564 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.159576 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:12Z","lastTransitionTime":"2026-01-21T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.262782 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.262826 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.262837 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.262855 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.262868 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:12Z","lastTransitionTime":"2026-01-21T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.366166 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.366217 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.366227 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.366247 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.366259 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:12Z","lastTransitionTime":"2026-01-21T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.470488 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.470557 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.470575 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.470599 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.470614 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:12Z","lastTransitionTime":"2026-01-21T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.574428 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.574489 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.574503 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.574527 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.574543 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:12Z","lastTransitionTime":"2026-01-21T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.677480 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.677528 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.677539 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.677559 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.677570 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:12Z","lastTransitionTime":"2026-01-21T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.781630 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.781701 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.781713 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.781732 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.781744 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:12Z","lastTransitionTime":"2026-01-21T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.884164 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.884206 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.884215 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.884231 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.884240 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:12Z","lastTransitionTime":"2026-01-21T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.909954 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 08:11:50.835121124 +0000 UTC Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.986728 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.986769 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.986779 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.986796 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:12 crc kubenswrapper[4799]: I0121 17:34:12.986805 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:12Z","lastTransitionTime":"2026-01-21T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.090095 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.090188 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.090206 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.090236 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.090257 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:13Z","lastTransitionTime":"2026-01-21T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.193525 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.193603 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.193623 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.193651 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.193671 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:13Z","lastTransitionTime":"2026-01-21T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.204111 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.204241 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:13 crc kubenswrapper[4799]: E0121 17:34:13.204395 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.204415 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.204439 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:13 crc kubenswrapper[4799]: E0121 17:34:13.204671 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:13 crc kubenswrapper[4799]: E0121 17:34:13.204836 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:13 crc kubenswrapper[4799]: E0121 17:34:13.204966 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.296984 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.297038 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.297053 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.297113 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.297138 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:13Z","lastTransitionTime":"2026-01-21T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.399652 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.399705 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.399715 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.399732 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.399743 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:13Z","lastTransitionTime":"2026-01-21T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.501893 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.501935 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.501943 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.501959 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.501968 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:13Z","lastTransitionTime":"2026-01-21T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.605544 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.605588 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.605599 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.605620 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.605638 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:13Z","lastTransitionTime":"2026-01-21T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.709270 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.709335 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.709375 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.709417 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.709431 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:13Z","lastTransitionTime":"2026-01-21T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.811864 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.811903 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.811914 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.811933 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.811945 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:13Z","lastTransitionTime":"2026-01-21T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.911149 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 10:02:00.042097546 +0000 UTC Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.914319 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.914360 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.914372 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.914390 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:13 crc kubenswrapper[4799]: I0121 17:34:13.914399 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:13Z","lastTransitionTime":"2026-01-21T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.017247 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.017298 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.017311 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.017330 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.017370 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:14Z","lastTransitionTime":"2026-01-21T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.119954 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.119999 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.120011 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.120032 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.120043 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:14Z","lastTransitionTime":"2026-01-21T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.221205 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.222621 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.222759 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.222783 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.222810 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.222869 4799 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:14Z","lastTransitionTime":"2026-01-21T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.237495 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.253875 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.268262 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.284797 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.297569 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.313316 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.326317 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.326363 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.326566 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.326588 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.326602 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:14Z","lastTransitionTime":"2026-01-21T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.328651 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc
32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.349982 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.364873 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.377993 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.393377 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1185e548-340d-42f6-b6e5-8b4826a43153\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5076079801d58a08432af79949fb03da7b9445180a950bdf1310638edfa8b95d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43e360aa041ebc4c926c01dc20eb1920c9e121b239f76c6f312d4928019f38c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ff097f5dbab0def600436177a09f212347b705be6b75949d9646a79ab4e2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.408341 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 
17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.426650 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.428715 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.428752 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.428761 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.428777 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.428786 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:14Z","lastTransitionTime":"2026-01-21T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.444353 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.458820 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.479442 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9
d148f0e8802b05d659e064d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:56Z\\\",\\\"message\\\":\\\"ePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:53, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9154, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0121 17:33:55.203455 6420 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-6qqjg in node crc\\\\nI0121 17:33:55.203456 6420 services_controller.go:445] Built service openshift-dns/dns-default LB template configs for network=default: []services.lbConfig(nil)\\\\nF0121 17:33:55.203462 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:14Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.531433 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.531490 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.531508 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.531532 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.531551 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:14Z","lastTransitionTime":"2026-01-21T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.634527 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.634583 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.634594 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.634610 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.634619 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:14Z","lastTransitionTime":"2026-01-21T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.737348 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.737420 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.737435 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.737458 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.737478 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:14Z","lastTransitionTime":"2026-01-21T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.840292 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.840341 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.840353 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.840371 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.840380 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:14Z","lastTransitionTime":"2026-01-21T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.911309 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 04:13:58.02250607 +0000 UTC Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.942694 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.942743 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.942757 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.942776 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:14 crc kubenswrapper[4799]: I0121 17:34:14.942791 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:14Z","lastTransitionTime":"2026-01-21T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.044889 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.044938 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.044948 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.044969 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.044982 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:15Z","lastTransitionTime":"2026-01-21T17:34:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.147186 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.147228 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.147236 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.147253 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.147262 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:15Z","lastTransitionTime":"2026-01-21T17:34:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.204598 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.204750 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.204949 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:15 crc kubenswrapper[4799]: E0121 17:34:15.204933 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.205003 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:15 crc kubenswrapper[4799]: E0121 17:34:15.205163 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:15 crc kubenswrapper[4799]: E0121 17:34:15.205226 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:15 crc kubenswrapper[4799]: E0121 17:34:15.205347 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.249767 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.249808 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.249820 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.249838 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.249850 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:15Z","lastTransitionTime":"2026-01-21T17:34:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.352196 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.352335 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.352353 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.352430 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.352485 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:15Z","lastTransitionTime":"2026-01-21T17:34:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.455323 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.455369 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.455379 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.455397 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.455410 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:15Z","lastTransitionTime":"2026-01-21T17:34:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.558973 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.559041 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.559063 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.559096 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.559119 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:15Z","lastTransitionTime":"2026-01-21T17:34:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.661491 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.661530 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.661542 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.661560 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.661572 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:15Z","lastTransitionTime":"2026-01-21T17:34:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.764565 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.764638 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.764648 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.764667 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.764680 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:15Z","lastTransitionTime":"2026-01-21T17:34:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.867617 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.867676 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.867695 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.867730 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.867765 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:15Z","lastTransitionTime":"2026-01-21T17:34:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.911542 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 09:26:45.228218863 +0000 UTC Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.970909 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.970971 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.970985 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.971008 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:15 crc kubenswrapper[4799]: I0121 17:34:15.971025 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:15Z","lastTransitionTime":"2026-01-21T17:34:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.073989 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.074031 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.074040 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.074058 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.074070 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:16Z","lastTransitionTime":"2026-01-21T17:34:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.176517 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.176575 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.176585 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.176603 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.176613 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:16Z","lastTransitionTime":"2026-01-21T17:34:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.279206 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.279260 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.279272 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.279291 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.279343 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:16Z","lastTransitionTime":"2026-01-21T17:34:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.351063 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-sl7lv_3004f2e1-bd6a-46a1-a6d9-835472f616b8/kube-multus/0.log" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.351149 4799 generic.go:334] "Generic (PLEG): container finished" podID="3004f2e1-bd6a-46a1-a6d9-835472f616b8" containerID="009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61" exitCode=1 Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.351190 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-sl7lv" event={"ID":"3004f2e1-bd6a-46a1-a6d9-835472f616b8","Type":"ContainerDied","Data":"009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61"} Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.351671 4799 scope.go:117] "RemoveContainer" containerID="009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.373115 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.382597 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.382641 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.382652 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.382688 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.382700 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:16Z","lastTransitionTime":"2026-01-21T17:34:16Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.387217 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.403222 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.417574 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.433234 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.446656 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 
17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.471617 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea52297
00cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:56Z\\\",\\\"message\\\":\\\"ePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:53, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9154, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0121 17:33:55.203455 6420 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-6qqjg in node crc\\\\nI0121 17:33:55.203456 6420 services_controller.go:445] Built service openshift-dns/dns-default LB template configs for network=default: []services.lbConfig(nil)\\\\nF0121 17:33:55.203462 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.483319 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.485151 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.485203 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.485213 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.485233 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.485243 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:16Z","lastTransitionTime":"2026-01-21T17:34:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.495721 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1185e548-340d-42f6-b6e5-8b4826a43153\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5076079801d58a08432af79949fb03da7b9445180a950bdf1310638edfa8b95d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43e360aa041ebc4c926c01dc20eb1920c9e121b239f76c6f312d4928019f38c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ff097f5dbab0def600436177a09f212347b705be6b75949d9646a79ab4e2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.507762 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.521010 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:34:15Z\\\",\\\"message\\\":\\\"2026-01-21T17:33:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1fb8a183-ffa9-4480-a94c-89b2337651ed\\\\n2026-01-21T17:33:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1fb8a183-ffa9-4480-a94c-89b2337651ed to /host/opt/cni/bin/\\\\n2026-01-21T17:33:30Z [verbose] multus-daemon started\\\\n2026-01-21T17:33:30Z [verbose] Readiness Indicator file check\\\\n2026-01-21T17:34:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.536494 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.549356 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.565072 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\"
:\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 
17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 
17:34:16.579896 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.587326 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.587379 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.587397 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.587416 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.587426 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:16Z","lastTransitionTime":"2026-01-21T17:34:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.593075 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.603300 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:16Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.691170 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.691232 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.691243 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.691262 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.691271 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:16Z","lastTransitionTime":"2026-01-21T17:34:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.793211 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.793265 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.793274 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.793293 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.793304 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:16Z","lastTransitionTime":"2026-01-21T17:34:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.908813 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.908863 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.908872 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.908888 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.908901 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:16Z","lastTransitionTime":"2026-01-21T17:34:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:16 crc kubenswrapper[4799]: I0121 17:34:16.914199 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 06:54:22.171282261 +0000 UTC
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.011180 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.011241 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.011253 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.011275 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.011286 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:17Z","lastTransitionTime":"2026-01-21T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.114202 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.114240 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.114249 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.114278 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.114290 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:17Z","lastTransitionTime":"2026-01-21T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.574296 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.574384 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.574433 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:34:17 crc kubenswrapper[4799]: E0121 17:34:17.574481 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.574506 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:34:17 crc kubenswrapper[4799]: E0121 17:34:17.574585 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3"
Jan 21 17:34:17 crc kubenswrapper[4799]: E0121 17:34:17.574712 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:34:17 crc kubenswrapper[4799]: E0121 17:34:17.574814 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.577223 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.577247 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.577255 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.577268 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.577278 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:17Z","lastTransitionTime":"2026-01-21T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.577278 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:17Z","lastTransitionTime":"2026-01-21T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.579379 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-sl7lv_3004f2e1-bd6a-46a1-a6d9-835472f616b8/kube-multus/0.log"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.579437 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-sl7lv" event={"ID":"3004f2e1-bd6a-46a1-a6d9-835472f616b8","Type":"ContainerStarted","Data":"cc4bd2b4d337b25c3a57212bc50968e1ce7cfe716f539f873a37c9adde85dc4f"}
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.603167 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
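Everything from here on is a single failure mode repeated once per pod. Each status_manager.go:875 entry is the kubelet trying to patch a pod's status (a strategic merge patch, visible from the $setElementOrder/conditions directive in the payload), and each patch is rejected before it reaches storage because the API server must first call the pod.network-node-identity.openshift.io admission webhook at https://127.0.0.1:9743, whose serving certificate expired on 2025-08-24, long before the logged clock of 2026-01-21. A small diagnostic sketch (assumed to run on the node itself, against the same endpoint) that reproduces the x509 verdict by inspecting the certificate directly:

// Diagnostic sketch: dial the webhook endpoint from the errors below and
// print its serving certificate's validity window, reproducing the
// "x509: certificate has expired" verdict without going through the API server.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		// Skip chain verification so an expired cert can still be inspected.
		InsecureSkipVerify: true,
	})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject=%v notBefore=%s notAfter=%s\n",
		cert.Subject,
		cert.NotBefore.Format(time.RFC3339),
		cert.NotAfter.Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		// The verdict seen in every patch error below.
		fmt.Println("x509: certificate has expired")
	}
}

The same expired-certificate error terminates every patch attempt in the entries that follow; the patch payloads themselves are well-formed.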
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.610237 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 2025-08-24T17:21:41Z"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.627671 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 2025-08-24T17:21:41Z"
Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.639934 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.659243 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.672767 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.680119 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.680195 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.680205 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.680240 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.680251 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:17Z","lastTransitionTime":"2026-01-21T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.689810 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb
7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.704258 4799 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.718206 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0
d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.730817 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.743112 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.757541 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 
2025-08-24T17:21:41Z" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.775419 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc4bd2b4d337b25c3a57212bc50968e1ce7cfe716f539f873a37c9adde85dc4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:34:15Z\\\",\\\"message\\\":\\\"2026-01-21T17:33:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1fb8a183-ffa9-4480-a94c-89b2337651ed\\\\n2026-01-21T17:33:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1fb8a183-ffa9-4480-a94c-89b2337651ed to /host/opt/cni/bin/\\\\n2026-01-21T17:33:30Z [verbose] multus-daemon started\\\\n2026-01-21T17:33:30Z [verbose] Readiness Indicator file check\\\\n2026-01-21T17:34:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:34:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.783187 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.783227 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.783238 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.783257 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.783267 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:17Z","lastTransitionTime":"2026-01-21T17:34:17Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.793625 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.808269 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.833055 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:56Z\\\",\\\"message\\\":\\\"ePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:53, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9154, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0121 17:33:55.203455 6420 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-6qqjg in node crc\\\\nI0121 17:33:55.203456 6420 services_controller.go:445] Built service openshift-dns/dns-default LB template configs for network=default: []services.lbConfig(nil)\\\\nF0121 17:33:55.203462 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.846485 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.861855 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1185e548-340d-42f6-b6e5-8b4826a43153\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5076079801d58a08432af79949fb03da7b9445180a950bdf1310638edfa8b95d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43e360aa041ebc4c926c01dc20eb1920c9e121b239f76c6f312d4928019f38c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ff097f5dbab0def600436177a09f212347b705be6b75949d9646a79ab4e2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:17Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.885788 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.885843 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.885857 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.885883 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.885895 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:17Z","lastTransitionTime":"2026-01-21T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.914648 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 15:36:45.607132038 +0000 UTC Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.988798 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.988841 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.988857 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.988876 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:17 crc kubenswrapper[4799]: I0121 17:34:17.988888 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:17Z","lastTransitionTime":"2026-01-21T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.091011 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.091069 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.091080 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.091098 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.091109 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:18Z","lastTransitionTime":"2026-01-21T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.193384 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.193451 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.193462 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.193479 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.193490 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:18Z","lastTransitionTime":"2026-01-21T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.295878 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.295935 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.295951 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.295972 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.295987 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:18Z","lastTransitionTime":"2026-01-21T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.398511 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.398575 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.398586 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.398606 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.398617 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:18Z","lastTransitionTime":"2026-01-21T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.501581 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.501632 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.501642 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.501660 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.501670 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:18Z","lastTransitionTime":"2026-01-21T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.604589 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.604651 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.604663 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.604685 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.604698 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:18Z","lastTransitionTime":"2026-01-21T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.707402 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.707499 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.707523 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.707558 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.707583 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:18Z","lastTransitionTime":"2026-01-21T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.810525 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.810571 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.810582 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.810600 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.810611 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:18Z","lastTransitionTime":"2026-01-21T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.912548 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.912594 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.912606 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.912628 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.912642 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:18Z","lastTransitionTime":"2026-01-21T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:18 crc kubenswrapper[4799]: I0121 17:34:18.914995 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 08:47:49.190912757 +0000 UTC Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.015825 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.015867 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.015875 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.015893 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.015902 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:19Z","lastTransitionTime":"2026-01-21T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.118608 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.118649 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.118659 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.118677 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.118688 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:19Z","lastTransitionTime":"2026-01-21T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.157483 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.157550 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.157562 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.157582 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.157591 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:19Z","lastTransitionTime":"2026-01-21T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:19 crc kubenswrapper[4799]: E0121 17:34:19.173212 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.178819 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.178879 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.178892 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.178912 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.178922 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:19Z","lastTransitionTime":"2026-01-21T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:19 crc kubenswrapper[4799]: E0121 17:34:19.195074 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.199281 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.199308 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.199318 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.199334 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.199346 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:19Z","lastTransitionTime":"2026-01-21T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.204969 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.204978 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.205043 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.205089 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:19 crc kubenswrapper[4799]: E0121 17:34:19.205110 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:19 crc kubenswrapper[4799]: E0121 17:34:19.205215 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:19 crc kubenswrapper[4799]: E0121 17:34:19.205314 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:19 crc kubenswrapper[4799]: E0121 17:34:19.205424 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:19 crc kubenswrapper[4799]: E0121 17:34:19.213452 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.217482 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.217550 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.217561 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.217578 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.217589 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:19Z","lastTransitionTime":"2026-01-21T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:19 crc kubenswrapper[4799]: E0121 17:34:19.233340 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.237233 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.237262 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.237274 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.237294 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.237306 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:19Z","lastTransitionTime":"2026-01-21T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:19 crc kubenswrapper[4799]: E0121 17:34:19.249885 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:19Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:19 crc kubenswrapper[4799]: E0121 17:34:19.250049 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.251531 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.251556 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.251568 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.251586 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.251598 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:19Z","lastTransitionTime":"2026-01-21T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.354775 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.354839 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.354853 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.354872 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.354883 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:19Z","lastTransitionTime":"2026-01-21T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.458390 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.458437 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.458446 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.458464 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.458474 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:19Z","lastTransitionTime":"2026-01-21T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.560763 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.560827 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.560840 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.560860 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.560872 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:19Z","lastTransitionTime":"2026-01-21T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.663875 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.663933 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.663943 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.663966 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.663984 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:19Z","lastTransitionTime":"2026-01-21T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.767289 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.767345 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.767355 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.767387 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.767401 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:19Z","lastTransitionTime":"2026-01-21T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.870045 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.870098 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.870110 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.870154 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.870169 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:19Z","lastTransitionTime":"2026-01-21T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.916293 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 15:30:45.911247477 +0000 UTC Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.973112 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.973215 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.973224 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.973241 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:19 crc kubenswrapper[4799]: I0121 17:34:19.973252 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:19Z","lastTransitionTime":"2026-01-21T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.075828 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.075874 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.075882 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.075900 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.075913 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:20Z","lastTransitionTime":"2026-01-21T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.178947 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.178995 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.179008 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.179026 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.179039 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:20Z","lastTransitionTime":"2026-01-21T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.282000 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.282069 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.282085 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.282115 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.282199 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:20Z","lastTransitionTime":"2026-01-21T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.385352 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.385393 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.385402 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.385418 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.385430 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:20Z","lastTransitionTime":"2026-01-21T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.487948 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.487989 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.488000 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.488016 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.488024 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:20Z","lastTransitionTime":"2026-01-21T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.590260 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.590334 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.590372 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.590405 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.590434 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:20Z","lastTransitionTime":"2026-01-21T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.693009 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.693197 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.693243 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.693296 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.693325 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:20Z","lastTransitionTime":"2026-01-21T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.796089 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.796170 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.796185 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.796204 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.796216 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:20Z","lastTransitionTime":"2026-01-21T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.898525 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.898562 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.898570 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.898587 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.898597 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:20Z","lastTransitionTime":"2026-01-21T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:20 crc kubenswrapper[4799]: I0121 17:34:20.916609 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 22:09:22.449257017 +0000 UTC Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.001925 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.001973 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.001984 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.002003 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.002014 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:21Z","lastTransitionTime":"2026-01-21T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.105305 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.105358 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.105368 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.105386 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.105396 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:21Z","lastTransitionTime":"2026-01-21T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.204090 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.204186 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:21 crc kubenswrapper[4799]: E0121 17:34:21.204274 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:21 crc kubenswrapper[4799]: E0121 17:34:21.204362 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.204471 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:21 crc kubenswrapper[4799]: E0121 17:34:21.204654 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.204730 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:21 crc kubenswrapper[4799]: E0121 17:34:21.204781 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.208030 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.208054 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.208064 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.208077 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.208086 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:21Z","lastTransitionTime":"2026-01-21T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.220411 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.310209 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.310250 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.310259 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.310275 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.310285 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:21Z","lastTransitionTime":"2026-01-21T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.413294 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.413333 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.413341 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.413356 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.413365 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:21Z","lastTransitionTime":"2026-01-21T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.515917 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.515969 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.515982 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.516000 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.516010 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:21Z","lastTransitionTime":"2026-01-21T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.619437 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.619482 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.619492 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.619508 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.619518 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:21Z","lastTransitionTime":"2026-01-21T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.723016 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.723062 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.723070 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.723087 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.723097 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:21Z","lastTransitionTime":"2026-01-21T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.825728 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.825788 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.825798 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.825817 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.825827 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:21Z","lastTransitionTime":"2026-01-21T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.917682 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 20:56:15.52247763 +0000 UTC Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.928695 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.928727 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.928738 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.928755 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:21 crc kubenswrapper[4799]: I0121 17:34:21.928766 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:21Z","lastTransitionTime":"2026-01-21T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.033211 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.033254 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.033268 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.033290 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.033306 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:22Z","lastTransitionTime":"2026-01-21T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.137044 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.137111 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.137154 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.137177 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.137188 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:22Z","lastTransitionTime":"2026-01-21T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.206189 4799 scope.go:117] "RemoveContainer" containerID="32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.240494 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.240563 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.240575 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.240650 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.240672 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:22Z","lastTransitionTime":"2026-01-21T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.344305 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.344363 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.344378 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.344401 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.344419 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:22Z","lastTransitionTime":"2026-01-21T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.448960 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.449042 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.449069 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.449104 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.449191 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:22Z","lastTransitionTime":"2026-01-21T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.553070 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.553213 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.553231 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.553255 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.553271 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:22Z","lastTransitionTime":"2026-01-21T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.656691 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.656747 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.656761 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.656786 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.656804 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:22Z","lastTransitionTime":"2026-01-21T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.760623 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.760676 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.760688 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.760706 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.760718 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:22Z","lastTransitionTime":"2026-01-21T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.862894 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.862935 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.862947 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.862967 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.862980 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:22Z","lastTransitionTime":"2026-01-21T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.918511 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 18:31:24.501211635 +0000 UTC Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.965864 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.965913 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.965928 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.965953 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:22 crc kubenswrapper[4799]: I0121 17:34:22.965969 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:22Z","lastTransitionTime":"2026-01-21T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.069618 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.069689 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.069722 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.069755 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.069777 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:23Z","lastTransitionTime":"2026-01-21T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.172771 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.172802 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.172813 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.172832 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.172844 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:23Z","lastTransitionTime":"2026-01-21T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.204966 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:23 crc kubenswrapper[4799]: E0121 17:34:23.205166 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.205381 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:23 crc kubenswrapper[4799]: E0121 17:34:23.205442 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.205562 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:23 crc kubenswrapper[4799]: E0121 17:34:23.205638 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.205784 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:23 crc kubenswrapper[4799]: E0121 17:34:23.205839 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.276481 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.276516 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.276525 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.276540 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.276550 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:23Z","lastTransitionTime":"2026-01-21T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.380032 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.380080 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.380089 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.380108 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.380119 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:23Z","lastTransitionTime":"2026-01-21T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.483083 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.483157 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.483175 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.483197 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.483210 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:23Z","lastTransitionTime":"2026-01-21T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.585924 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.585984 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.585994 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.586013 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.586024 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:23Z","lastTransitionTime":"2026-01-21T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.600934 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovnkube-controller/2.log" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.604431 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerStarted","Data":"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258"} Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.604962 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.619838 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountP
ath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.642553 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6dd02d-892e-4455-8617-d19f2b2e093e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c90a67efcfae35a96cdfc2a14d3150b8872e5e46ff28468d5d1910d34041b6dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://356e8e169444fda17290b5c8c7b6f741b075a104e1a7a247411bbcfb20b0ef4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2026-01-21T17:33:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98274f97e4084169a2703e4c4eba552ea91868bfde19278d9365ce50d7a8cff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36b3ce4ad807a78bd8e7ffaa8e43beabdecde4f1e8b537d5b8e55c9cf05f67b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c075cb965f6955fc583d6acbfd34a5746a92b6f2f30f1ddfdbc59b271050ad7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e8d6ff18f53190b7b6a2b39949496833ba1d6551612c97959d4e55e86abdb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64e8d6ff18f53190b7b6a2b39949496833ba1d65516
12c97959d4e55e86abdb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85585b0d402e98e6f91410bef0666ba47bc3b9138153aeda2023decfa1c8a641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85585b0d402e98e6f91410bef0666ba47bc3b9138153aeda2023decfa1c8a641\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f3f8f40740daa57712389592e60faf5fa8e5433c827fe0fa540e4f19f75244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f3f8f40740daa57712389592e60faf5fa8e5433c827fe0fa540e4f19f75244d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.656297 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.669467 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.682494 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.688252 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.688312 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.688322 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.688343 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.688355 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:23Z","lastTransitionTime":"2026-01-21T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.698787 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.721827 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:56Z\\\",\\\"message\\\":\\\"ePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:53, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9154, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0121 17:33:55.203455 6420 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-6qqjg in node crc\\\\nI0121 17:33:55.203456 6420 services_controller.go:445] Built service openshift-dns/dns-default LB template configs for network=default: []services.lbConfig(nil)\\\\nF0121 17:33:55.203462 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node 
ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:34:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.737315 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.756313 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1185e548-340d-42f6-b6e5-8b4826a43153\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5076079801d58a08432af79949fb03da7b9445180a950bdf1310638edfa8b95d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43e360aa041ebc4c926c01dc20eb1920c9e121b239f76c6f312d4928019f38c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ff097f5dbab0def600436177a09f212347b705be6b75949d9646a79ab4e2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.772859 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 
17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.790220 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc4bd2b4d337b25c3a57212bc50968e1ce7cfe716f539f873a37c9adde85dc4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:34:15Z\\\",\\\"message\\\":\\\"2026-01-21T17:33:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1fb8a183-ffa9-4480-a94c-89b2337651ed\\\\n2026-01-21T17:33:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1fb8a183-ffa9-4480-a94c-89b2337651ed to /host/opt/cni/bin/\\\\n2026-01-21T17:33:30Z [verbose] multus-daemon started\\\\n2026-01-21T17:33:30Z [verbose] Readiness Indicator file check\\\\n2026-01-21T17:34:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:34:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.791247 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.791286 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.791299 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.791321 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.791341 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:23Z","lastTransitionTime":"2026-01-21T17:34:23Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.819000 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.851836 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.878942 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.894017 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.894447 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.894532 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.894599 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.894670 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:23Z","lastTransitionTime":"2026-01-21T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.896645 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.914643 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.918606 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 19:29:29.017461372 +0000 UTC Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.929261 4799 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cdd
fba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.941361 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5108a8b3-b201-4c5c-bb12-038eeb9b4a61\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bc3cd5709df489fdb1fe0890f905c648166df1a65093ecd00f15052c59e64e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe229c5bd53b1207c5b1dd29eba6ffa1ebcb82e00a7d07de733e375527f7d2af\\\",
\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe229c5bd53b1207c5b1dd29eba6ffa1ebcb82e00a7d07de733e375527f7d2af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.952955 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\
"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:23Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.997015 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.997063 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.997072 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.997090 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:23 crc kubenswrapper[4799]: I0121 17:34:23.997099 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:23Z","lastTransitionTime":"2026-01-21T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.100874 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.100923 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.100933 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.100953 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.100966 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:24Z","lastTransitionTime":"2026-01-21T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.203816 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.203851 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.203861 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.203876 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.203887 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:24Z","lastTransitionTime":"2026-01-21T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.223046 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447
235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\
\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"f
inishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.244118 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.257270 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5108a8b3-b201-4c5c-bb12-038eeb9b4a61\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bc3cd5709df489fdb1fe0890f905c648166df1a65093ecd00f15052c59e64e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe229c5bd53b1207c5b1dd29eba6ffa1ebcb82e00a7d07de733e375527f7d2af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe229c5bd53b1207c5b1dd29eba6ffa1ebcb82e00a7d07de733e375527f7d2af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.270055 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.283983 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 
17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.306463 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.306515 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.306529 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.306550 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.306562 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:24Z","lastTransitionTime":"2026-01-21T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.309149 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6dd02d-892e-4455-8617-d19f2b2e093e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c90a67efcfae35a96cdfc2a14d3150b8872e5e46ff28468d5d1910d34041b6dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://356e8e169444fda17290b5c8c7b6f741b075a104e1a7a247411bbcfb20b0ef4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98274f97e4084169a2703e4c4eba552ea91868bfde19278d9365ce50d7a8cff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36b3ce4ad807a78bd8e7ffaa8e43beabdecde4f1e8b537d5b8e55c9cf05f67b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c075cb965f6955fc583d6acbfd34a5746a92b6f2f30f1ddfdbc59b271050ad7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e8d6ff18f53190b7b6a2b39949496833ba1d6551612c97959d4e55e86abdb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64e8d6ff18f53190b7b6a2b39949496833ba1d6551612c97959d4e55e86abdb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85585b0d402e98e6f91410bef0666ba47bc3b9138153aeda2023decfa1c8a641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85585b0d402e98e6f91410bef0666ba47bc3b9138153aeda2023decfa1c8a641\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f3f8f40740daa57712389592e60faf5fa8e5433c827fe0fa540e4f19f75244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f3f8f40740daa57712389592e60faf5fa8e5433c827fe0fa540e4f19f75244d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.322745 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.336515 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.351296 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.364458 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.386973 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4685c6b680c453e3c729932ec2c4944d5529213e
e868db12108edf8222ede258\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:56Z\\\",\\\"message\\\":\\\"ePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:53, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9154, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0121 17:33:55.203455 6420 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-6qqjg in node crc\\\\nI0121 17:33:55.203456 6420 services_controller.go:445] Built service openshift-dns/dns-default LB template configs for network=default: []services.lbConfig(nil)\\\\nF0121 17:33:55.203462 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node 
ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:34:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.402439 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.409199 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.409271 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.409283 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.409308 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.409323 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:24Z","lastTransitionTime":"2026-01-21T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.418015 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1185e548-340d-42f6-b6e5-8b4826a43153\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5076079801d58a08432af79949fb03da7b9445180a950bdf1310638edfa8b95d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43e360aa041ebc4c926c01dc20eb1920c9e121b239f76c6f312d4928019f38c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ff097f5dbab0def600436177a09f212347b705be6b75949d9646a79ab4e2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.430824 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.447107 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc4bd2b4d337b25c3a57212bc50968e1ce7cfe716f539f873a37c9adde85dc4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:34:15Z\\\",\\\"message\\\":\\\"2026-01-21T17:33:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1fb8a183-ffa9-4480-a94c-89b2337651ed\\\\n2026-01-21T17:33:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1fb8a183-ffa9-4480-a94c-89b2337651ed to /host/opt/cni/bin/\\\\n2026-01-21T17:33:30Z [verbose] multus-daemon started\\\\n2026-01-21T17:33:30Z [verbose] Readiness Indicator file check\\\\n2026-01-21T17:34:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:34:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.459526 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.474492 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.489357 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.503014 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.512238 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.512284 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.512295 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.512317 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.512334 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:24Z","lastTransitionTime":"2026-01-21T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.609832 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovnkube-controller/3.log" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.610804 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovnkube-controller/2.log" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.613845 4799 generic.go:334] "Generic (PLEG): container finished" podID="6770819e-2fef-4203-9c5f-504628af7b66" containerID="4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258" exitCode=1 Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.613890 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerDied","Data":"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258"} Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.613912 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.613938 4799 scope.go:117] "RemoveContainer" containerID="32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.613952 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.613989 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.614014 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.614031 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:24Z","lastTransitionTime":"2026-01-21T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.614933 4799 scope.go:117] "RemoveContainer" containerID="4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258" Jan 21 17:34:24 crc kubenswrapper[4799]: E0121 17:34:24.615220 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.655531 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6dd02d-892e-4455-8617-d19f2b2e093e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c90a67efcfae35a96cdfc2a14d3150b8872e5e46ff28468d5d1910d34041b6dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://356e8e169444fda17290b5c8c7b6f741b075a104e1a7a247411bbcfb20b0ef4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\
\"}]},{\\\"containerID\\\":\\\"cri-o://98274f97e4084169a2703e4c4eba552ea91868bfde19278d9365ce50d7a8cff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36b3ce4ad807a78bd8e7ffaa8e43beabdecde4f1e8b537d5b8e55c9cf05f67b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c075cb965f6955fc583d6acbfd34a5746a92b6f2f30f1ddfdbc59b271050ad7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e8d6ff18f53190b7b6a2b39949496833ba1d6551612c97959d4e55e86abdb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64e8d6ff18f53190b7b6a2b39949496833ba1d6551612c97959d4e55e86abdb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85585b0d402e98e6f9
1410bef0666ba47bc3b9138153aeda2023decfa1c8a641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85585b0d402e98e6f91410bef0666ba47bc3b9138153aeda2023decfa1c8a641\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f3f8f40740daa57712389592e60faf5fa8e5433c827fe0fa540e4f19f75244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f3f8f40740daa57712389592e60faf5fa8e5433c827fe0fa540e4f19f75244d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.669584 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.683756 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.697248 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 
17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.710829 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.717658 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.717726 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.717745 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.717768 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.717786 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:24Z","lastTransitionTime":"2026-01-21T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.731713 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4685c6b680c453e3c729932ec2c4944d5529213e
e868db12108edf8222ede258\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32cdde4f240fda078ab1623369999e0dd0063ee9d148f0e8802b05d659e064d9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:33:56Z\\\",\\\"message\\\":\\\"ePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:53, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.10\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9154, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0121 17:33:55.203455 6420 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-6qqjg in node crc\\\\nI0121 17:33:55.203456 6420 services_controller.go:445] Built service openshift-dns/dns-default LB template configs for network=default: []services.lbConfig(nil)\\\\nF0121 17:33:55.203462 6420 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node ne\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:34:24Z\\\",\\\"message\\\":\\\"tor/metrics]} name:Service_openshift-etcd-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.188:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {53c717ca-2174-4315-bb03-c937a9c0d9b6}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0121 17:34:24.025467 6822 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-etcd-operator/metrics]} name:Service_openshift-etcd-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.188:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {53c717ca-2174-4315-bb03-c937a9c0d9b6}] Until: Durable:\\\\u003cnil\\\\u003e 
Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0121 17:34:24.025491 6822 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-daemon\\\\\\\"}\\\\nI0121 17:34:24.025480 6822 port_cache.go:96] port-ca\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:34:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\
\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.754039 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.771988 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1185e548-340d-42f6-b6e5-8b4826a43153\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5076079801d58a08432af79949fb03da7b9445180a950bdf1310638edfa8b95d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43e360aa041ebc4c926c01dc20eb1920c9e121b239f76c6f312d4928019f38c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ff097f5dbab0def600436177a09f212347b705be6b75949d9646a79ab4e2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.786744 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 
17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.801345 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc4bd2b4d337b25c3a57212bc50968e1ce7cfe716f539f873a37c9adde85dc4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:34:15Z\\\",\\\"message\\\":\\\"2026-01-21T17:33:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1fb8a183-ffa9-4480-a94c-89b2337651ed\\\\n2026-01-21T17:33:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1fb8a183-ffa9-4480-a94c-89b2337651ed to /host/opt/cni/bin/\\\\n2026-01-21T17:33:30Z [verbose] multus-daemon started\\\\n2026-01-21T17:33:30Z [verbose] Readiness Indicator file check\\\\n2026-01-21T17:34:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:34:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.817108 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.821602 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.821660 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.821670 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.821689 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.821705 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:24Z","lastTransitionTime":"2026-01-21T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.845624 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.860304 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.874732 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.886099 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.899454 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.911235 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5108a8b3-b201-4c5c-bb12-038eeb9b4a61\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bc3cd5709df489fdb1fe0890f905c648166df1a65093ecd00f15052c59e64e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe229c5bd53b1207c5b1dd29eba6ffa1ebcb82e00a7d07de733e375527f7d2af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318
bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe229c5bd53b1207c5b1dd29eba6ffa1ebcb82e00a7d07de733e375527f7d2af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.920181 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 06:52:10.30636597 +0000 UTC Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.921967 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"
hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.924179 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.924239 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.924250 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.924271 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.924282 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:24Z","lastTransitionTime":"2026-01-21T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:24 crc kubenswrapper[4799]: I0121 17:34:24.939020 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:24Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.027441 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.027505 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:25 crc 
kubenswrapper[4799]: I0121 17:34:25.027515 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.027534 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.027545 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:25Z","lastTransitionTime":"2026-01-21T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.130767 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.130831 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.130845 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.130870 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.130891 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:25Z","lastTransitionTime":"2026-01-21T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.204867 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.204990 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.205081 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.205400 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999"
Jan 21 17:34:25 crc kubenswrapper[4799]: E0121 17:34:25.205392 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:34:25 crc kubenswrapper[4799]: E0121 17:34:25.205493 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:34:25 crc kubenswrapper[4799]: E0121 17:34:25.205635 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3"
Jan 21 17:34:25 crc kubenswrapper[4799]: E0121 17:34:25.205829 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.234273 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.234355 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.234381 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.234409 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.234428 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:25Z","lastTransitionTime":"2026-01-21T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.337946 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.338018 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.338036 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.338055 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.338069 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:25Z","lastTransitionTime":"2026-01-21T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.442114 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.442193 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.442203 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.442224 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.442240 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:25Z","lastTransitionTime":"2026-01-21T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.545241 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.545686 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.545825 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.545970 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.546395 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:25Z","lastTransitionTime":"2026-01-21T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.621208 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovnkube-controller/3.log" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.626383 4799 scope.go:117] "RemoveContainer" containerID="4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258" Jan 21 17:34:25 crc kubenswrapper[4799]: E0121 17:34:25.626972 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.645526 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"term
inated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c
3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.654245 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.654279 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.654288 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.654304 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.654315 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:25Z","lastTransitionTime":"2026-01-21T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.662326 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.675694 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5108a8b3-b201-4c5c-bb12-038eeb9b4a61\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bc3cd5709df489fdb1fe0890f905c648166df1a65093ecd00f15052c59e64e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe229c5bd53b1207c5b1dd29eba6ffa1ebcb82e00a7d07de733e375527f7d2af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe229c5bd53b1207c5b1dd29eba6ffa1ebcb82e00a7d07de733e375527f7d2af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.689797 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.701918 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 
17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.722485 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6dd02d-892e-4455-8617-d19f2b2e093e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c90a67efcfae35a96cdfc2a14d3150b8872e5e46ff28468d5d1910d34041b6dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://356e8e169444fda17290b5c8c7b6f741b075a104e1a7a247411bbcfb20b0ef4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98274f97e4084169a2703e4c4eba552ea91868bfde19278d9365ce50d7a8cff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36b3ce4ad807a78bd8e7ffaa8e43beabdecde4f1e8b537d5b8e55c9cf05f67b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c075cb965f6955fc583d6acbfd34a5746a92b6f2f30f1ddfdbc59b271050ad7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e8d6ff18f53190b7b6a2b39949496833ba1d6551612c97959d4e55e86abdb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64e8d6ff18f53190b7b6a2b39949496833ba1d6551612c97959d4e55e86abdb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85585b0d402e98e6f91410bef0666ba47bc3b9138153aeda2023decfa1c8a641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85585b0d402e98e6f91410bef0666ba47bc3b9138153aeda2023decfa1c8a641\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f3f8f40740daa57712389592e60faf5fa8e5433c827fe0fa540e4f19f75244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f3f8f40740daa57712389592e60faf5fa8e5433c827fe0fa540e4f19f75244d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.736780 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.749063 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.759609 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.759657 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.759669 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.759689 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.759702 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:25Z","lastTransitionTime":"2026-01-21T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.770446 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.784371 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.807328 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:34:24Z\\\",\\\"message\\\":\\\"tor/metrics]} name:Service_openshift-etcd-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.188:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {53c717ca-2174-4315-bb03-c937a9c0d9b6}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0121 17:34:24.025467 6822 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-etcd-operator/metrics]} name:Service_openshift-etcd-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.188:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {53c717ca-2174-4315-bb03-c937a9c0d9b6}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0121 17:34:24.025491 6822 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-daemon\\\\\\\"}\\\\nI0121 17:34:24.025480 6822 port_cache.go:96] port-ca\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:34:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.822504 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.835463 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1185e548-340d-42f6-b6e5-8b4826a43153\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5076079801d58a08432af79949fb03da7b9445180a950bdf1310638edfa8b95d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43e360aa041ebc4c926c01dc20eb1920c9e121b239f76c6f312d4928019f38c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ff097f5dbab0def600436177a09f212347b705be6b75949d9646a79ab4e2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.848626 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 
17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.861883 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc4bd2b4d337b25c3a57212bc50968e1ce7cfe716f539f873a37c9adde85dc4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:34:15Z\\\",\\\"message\\\":\\\"2026-01-21T17:33:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1fb8a183-ffa9-4480-a94c-89b2337651ed\\\\n2026-01-21T17:33:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1fb8a183-ffa9-4480-a94c-89b2337651ed to /host/opt/cni/bin/\\\\n2026-01-21T17:33:30Z [verbose] multus-daemon started\\\\n2026-01-21T17:33:30Z [verbose] Readiness Indicator file check\\\\n2026-01-21T17:34:15Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:34:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.862628 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.862661 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.862671 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.862686 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.862697 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:25Z","lastTransitionTime":"2026-01-21T17:34:25Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.874850 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.890457 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.903472 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.920524 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 14:45:30.810405395 +0000 UTC Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.921696 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:25Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.965441 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.965493 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.965507 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.965528 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:25 crc kubenswrapper[4799]: I0121 17:34:25.965541 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:25Z","lastTransitionTime":"2026-01-21T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.233912 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.233958 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.233967 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.233982 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.233995 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:26Z","lastTransitionTime":"2026-01-21T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.336917 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.336954 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.336964 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.336979 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.336989 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:26Z","lastTransitionTime":"2026-01-21T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.440455 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.440648 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.440675 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.440710 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.440735 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:26Z","lastTransitionTime":"2026-01-21T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.545314 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.545380 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.545390 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.545418 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.545434 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:26Z","lastTransitionTime":"2026-01-21T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.647931 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.647972 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.648002 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.648023 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.648036 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:26Z","lastTransitionTime":"2026-01-21T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.751480 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.751529 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.751546 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.751565 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.751579 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:26Z","lastTransitionTime":"2026-01-21T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.855229 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.855823 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.855836 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.855860 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.855873 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:26Z","lastTransitionTime":"2026-01-21T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.921222 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 08:03:34.80687014 +0000 UTC Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.958828 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.958877 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.958893 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.958915 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:26 crc kubenswrapper[4799]: I0121 17:34:26.959198 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:26Z","lastTransitionTime":"2026-01-21T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.135055 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.135109 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.135174 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.135199 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.135214 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:27Z","lastTransitionTime":"2026-01-21T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.204313 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.204345 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.204360 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:27 crc kubenswrapper[4799]: E0121 17:34:27.204472 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.204533 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:27 crc kubenswrapper[4799]: E0121 17:34:27.204575 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:27 crc kubenswrapper[4799]: E0121 17:34:27.204704 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:27 crc kubenswrapper[4799]: E0121 17:34:27.204789 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.238167 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.238246 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.238271 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.238321 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.238343 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:27Z","lastTransitionTime":"2026-01-21T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.342332 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.342373 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.342386 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.342408 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.342421 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:27Z","lastTransitionTime":"2026-01-21T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.446225 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.446259 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.446268 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.446289 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.446304 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:27Z","lastTransitionTime":"2026-01-21T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.550249 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.550345 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.550394 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.550434 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.550460 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:27Z","lastTransitionTime":"2026-01-21T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.653246 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.653317 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.653328 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.653355 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.653368 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:27Z","lastTransitionTime":"2026-01-21T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.759824 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.759877 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.759889 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.759907 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.759916 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:27Z","lastTransitionTime":"2026-01-21T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.862656 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.862709 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.862726 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.862747 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.862760 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:27Z","lastTransitionTime":"2026-01-21T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.921675 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 01:54:48.251398964 +0000 UTC Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.966828 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.966904 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.966933 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.966966 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:27 crc kubenswrapper[4799]: I0121 17:34:27.966994 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:27Z","lastTransitionTime":"2026-01-21T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.070363 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.070442 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.070466 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.070507 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.070531 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:28Z","lastTransitionTime":"2026-01-21T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.173606 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.173681 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.173693 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.173710 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.173722 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:28Z","lastTransitionTime":"2026-01-21T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.277067 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.277119 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.277149 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.277169 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.277182 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:28Z","lastTransitionTime":"2026-01-21T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.380943 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.381029 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.381047 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.381072 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.381090 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:28Z","lastTransitionTime":"2026-01-21T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.513027 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.513085 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.513099 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.513122 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.513157 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:28Z","lastTransitionTime":"2026-01-21T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.619892 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.619962 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.619987 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.620020 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.620044 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:28Z","lastTransitionTime":"2026-01-21T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.724019 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.724078 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.724088 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.724106 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.724121 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:28Z","lastTransitionTime":"2026-01-21T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.828237 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.828695 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.828837 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.829282 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.829442 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:28Z","lastTransitionTime":"2026-01-21T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.922471 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 19:36:30.85373151 +0000 UTC Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.932837 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.932880 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.932891 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.932908 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:28 crc kubenswrapper[4799]: I0121 17:34:28.932921 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:28Z","lastTransitionTime":"2026-01-21T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.036683 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.036740 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.036755 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.036787 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.036804 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:29Z","lastTransitionTime":"2026-01-21T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.140226 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.140275 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.140287 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.140314 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.140333 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:29Z","lastTransitionTime":"2026-01-21T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.204315 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.204456 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.204582 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.204607 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.204489 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.204755 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.204866 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.204976 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.243732 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.243768 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.243778 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.243793 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.243807 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:29Z","lastTransitionTime":"2026-01-21T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.269577 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.269900 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:33.269852055 +0000 UTC m=+159.896142128 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.347446 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.347502 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.347519 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.347539 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.347552 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:29Z","lastTransitionTime":"2026-01-21T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.370601 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.370660 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.370862 4799 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.370876 4799 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.370944 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:35:33.370929233 +0000 UTC m=+159.997219256 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.371036 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-21 17:35:33.370994935 +0000 UTC m=+159.997285128 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.450459 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.450540 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.450558 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.450586 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.450605 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:29Z","lastTransitionTime":"2026-01-21T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.553650 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.553697 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.553710 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.553731 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.553746 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:29Z","lastTransitionTime":"2026-01-21T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.572655 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.572787 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.572982 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.573002 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.573014 4799 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.573050 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.573120 4799 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.573163 4799 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.573093 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-21 17:35:33.57307883 +0000 UTC m=+160.199368853 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.573356 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-21 17:35:33.573331337 +0000 UTC m=+160.199621510 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.646191 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.646253 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.646269 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.646289 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.646303 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:29Z","lastTransitionTime":"2026-01-21T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.661362 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.665589 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.665637 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.665650 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.665671 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.665684 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:29Z","lastTransitionTime":"2026-01-21T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.679014 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.683803 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.683832 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.683842 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.683858 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.683869 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:29Z","lastTransitionTime":"2026-01-21T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.697441 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.702593 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.702649 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.702665 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.702693 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.702707 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:29Z","lastTransitionTime":"2026-01-21T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.718601 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.723969 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.724026 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.724038 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.724063 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.724078 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:29Z","lastTransitionTime":"2026-01-21T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.737797 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:29Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:29 crc kubenswrapper[4799]: E0121 17:34:29.737932 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.739796 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.739830 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.739842 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.739863 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.739875 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:29Z","lastTransitionTime":"2026-01-21T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.843836 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.843897 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.843911 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.843932 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.843946 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:29Z","lastTransitionTime":"2026-01-21T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.923494 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 04:00:42.317086474 +0000 UTC Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.947033 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.947088 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.947099 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.947122 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:29 crc kubenswrapper[4799]: I0121 17:34:29.947150 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:29Z","lastTransitionTime":"2026-01-21T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:30 crc kubenswrapper[4799]: I0121 17:34:30.051347 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:30 crc kubenswrapper[4799]: I0121 17:34:30.051432 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:30 crc kubenswrapper[4799]: I0121 17:34:30.051456 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:30 crc kubenswrapper[4799]: I0121 17:34:30.051489 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:30 crc kubenswrapper[4799]: I0121 17:34:30.051512 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:30Z","lastTransitionTime":"2026-01-21T17:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
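The certificate_manager.go line above (and its repeats below) reports a different rotation deadline on every pass even though the certificate's expiry never changes. That is expected behavior: client-go's kubelet certificate manager draws the rotation deadline as a jittered point in the back portion of the certificate's validity window and re-randomizes it on each sync loop. A minimal Go sketch of that rule, assuming the roughly 70-90% jitter factors used by k8s.io/client-go (exact factors vary by version, and notBefore below is hypothetical since the log only shows the expiry):

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline sketches how client-go's certificate manager picks a
// rotation deadline: a jittered point ~70-90% of the way through the
// certificate's validity window, re-randomized each time it is computed.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	// Expiry taken from the log; issue time is a hypothetical placeholder.
	notAfter, _ := time.Parse("2006-01-02 15:04:05 -0700 MST", "2026-02-24 05:53:03 +0000 UTC")
	notBefore := notAfter.Add(-90 * 24 * time.Hour)
	for i := 0; i < 3; i++ {
		fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
	}
}

The spread of deadlines in this excerpt (2025-11-18 through 2026-01-12) is consistent with one wide validity window being re-jittered about once a second.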
[... heartbeat block repeated every ~100 ms from 17:34:29.947033 to 17:34:30.885915, unchanged except timestamps ...]
Jan 21 17:34:30 crc kubenswrapper[4799]: I0121 17:34:30.924426 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 20:40:28.952151162 +0000 UTC
[... heartbeat block repeated every ~100 ms from 17:34:30.990117 to 17:34:31.200018, unchanged except timestamps ...]
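Every "Node became not ready" repeat above carries the same root message: the container runtime reports NetworkReady=false because nothing has yet written a CNI network definition into /etc/kubernetes/cni/net.d/. The check behind that message amounts to globbing the configured CNI conf directory for network definitions; a sketch of it follows, with the directory taken from the log message and the globs from common libcni conventions (not a verbatim copy of CRI-O's implementation):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// Sketch of the readiness check behind "no CNI configuration file in
// /etc/kubernetes/cni/net.d/": while no *.conf/*.conflist/*.json exists in
// the runtime's CNI confdir, the runtime keeps reporting NetworkReady=false.
func main() {
	dir := "/etc/kubernetes/cni/net.d"
	var found []string
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		m, _ := filepath.Glob(filepath.Join(dir, pat))
		found = append(found, m...)
	}
	if len(found) == 0 {
		fmt.Fprintln(os.Stderr, "NetworkReady=false: no CNI config in", dir)
		os.Exit(1)
	}
	fmt.Println("CNI configs:", found)
}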
Jan 21 17:34:31 crc kubenswrapper[4799]: I0121 17:34:31.204964 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:34:31 crc kubenswrapper[4799]: I0121 17:34:31.204963 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:34:31 crc kubenswrapper[4799]: I0121 17:34:31.205008 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999"
Jan 21 17:34:31 crc kubenswrapper[4799]: I0121 17:34:31.205037 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:34:31 crc kubenswrapper[4799]: E0121 17:34:31.205530 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:34:31 crc kubenswrapper[4799]: E0121 17:34:31.205719 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:34:31 crc kubenswrapper[4799]: E0121 17:34:31.205948 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3"
Jan 21 17:34:31 crc kubenswrapper[4799]: E0121 17:34:31.206175 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[... heartbeat block repeated every ~100 ms from 17:34:31.303118 to 17:34:31.406590, unchanged except timestamps ...]
[... heartbeat block repeated every ~100 ms from 17:34:31.510515 to 17:34:31.923062, unchanged except timestamps ...]
Jan 21 17:34:31 crc kubenswrapper[4799]: I0121 17:34:31.925299 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 04:36:11.336572402 +0000 UTC
[... heartbeat block repeated every ~100 ms from 17:34:32.027081 to 17:34:32.860119, unchanged except timestamps ...]
Jan 21 17:34:32 crc kubenswrapper[4799]: I0121 17:34:32.926443 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 17:36:36.771835067 +0000 UTC
[... heartbeat block repeated at 17:34:32.963309, unchanged except timestamps ...]
[... heartbeat block repeated every ~100 ms from 17:34:33.067566 to 17:34:33.170918, unchanged except timestamps ...]
Jan 21 17:34:33 crc kubenswrapper[4799]: I0121 17:34:33.204367 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:34:33 crc kubenswrapper[4799]: E0121 17:34:33.204495 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:34:33 crc kubenswrapper[4799]: I0121 17:34:33.204663 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999"
Jan 21 17:34:33 crc kubenswrapper[4799]: I0121 17:34:33.204750 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:34:33 crc kubenswrapper[4799]: I0121 17:34:33.204764 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 21 17:34:33 crc kubenswrapper[4799]: E0121 17:34:33.205028 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:34:33 crc kubenswrapper[4799]: E0121 17:34:33.204903 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3"
Jan 21 17:34:33 crc kubenswrapper[4799]: E0121 17:34:33.205073 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[... heartbeat block repeated at 17:34:33.274275, unchanged except timestamps ...]
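These are the same four pods failing the same readiness gate two seconds later; nothing will change until the default network publishes its CNI config. The multus container status later in this excerpt shows the other half of the loop: multus polls for the OVN-Kubernetes readiness indicator file (/host/run/multus/cni/net.d/10-ovn-kubernetes.conf) and exits once the poll times out ("pollimmediate error: timed out waiting for the condition"). A sketch of that wait follows; the interval and timeout values are illustrative, not multus' actual flag defaults:

package main

import (
	"context"
	"fmt"
	"os"
	"time"
)

// waitForFile polls (checking immediately, then on each tick) until the
// default network's CNI config appears, or gives up when ctx expires --
// the shape of multus' readiness-indicator wait.
func waitForFile(ctx context.Context, path string, interval time.Duration) error {
	tick := time.NewTicker(interval)
	defer tick.Stop()
	for {
		if _, err := os.Stat(path); err == nil {
			return nil
		}
		select {
		case <-ctx.Done():
			return fmt.Errorf("timed out waiting for %s: %w", path, ctx.Err())
		case <-tick.C:
		}
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 45*time.Second)
	defer cancel()
	if err := waitForFile(ctx, "/host/run/multus/cni/net.d/10-ovn-kubernetes.conf", time.Second); err != nil {
		fmt.Fprintln(os.Stderr, "[error] have you checked that your default network is ready?", err)
		os.Exit(1)
	}
}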
[... heartbeat block repeated every ~100 ms from 17:34:33.379270 to 17:34:33.896572, unchanged except timestamps ...]
Jan 21 17:34:33 crc kubenswrapper[4799]: I0121 17:34:33.926569 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 23:20:14.5547765 +0000 UTC
[... heartbeat block repeated at 17:34:34.000205, unchanged except timestamps ...]
Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.103373 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.103457 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.103478 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.103504 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.103523 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:34Z","lastTransitionTime":"2026-01-21T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.206985 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.207053 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.207076 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.207104 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.207150 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:34Z","lastTransitionTime":"2026-01-21T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.224951 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-sl7lv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3004f2e1-bd6a-46a1-a6d9-835472f616b8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc4bd2b4d337b25c3a57212bc50968e1ce7cfe716f539f873a37c9adde85dc4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:34:15Z\\\",\\\"message\\\":\\\"2026-01-21T17:33:30+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1fb8a183-ffa9-4480-a94c-89b2337651ed\\\\n2026-01-21T17:33:30+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1fb8a183-ffa9-4480-a94c-89b2337651ed to /host/opt/cni/bin/\\\\n2026-01-21T17:33:30Z [verbose] multus-daemon started\\\\n2026-01-21T17:33:30Z [verbose] Readiness Indicator file check\\\\n2026-01-21T17:34:15Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:34:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p4tcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-sl7lv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.241500 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.262551 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a9a6c57-0a82-4115-b895-c414b0cc6a3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bc7d0f10fd39604c58cec9e8817336507418044cef396d7b37135712f4f4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8gxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-snc2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.285412 4799 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6770819e-2fef-4203-9c5f-504628af7b66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-21T17:34:24Z\\\",\\\"message\\\":\\\"tor/metrics]} name:Service_openshift-etcd-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.188:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {53c717ca-2174-4315-bb03-c937a9c0d9b6}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0121 17:34:24.025467 6822 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-etcd-operator/metrics]} name:Service_openshift-etcd-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.188:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {53c717ca-2174-4315-bb03-c937a9c0d9b6}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0121 17:34:24.025491 6822 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-daemon\\\\\\\"}\\\\nI0121 17:34:24.025480 6822 port_cache.go:96] port-ca\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:34:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p8s9k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6qqjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.296696 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7q999" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7796adba-b973-44ee-b0c4-c0df544250e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gzlq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7q999\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.306460 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1185e548-340d-42f6-b6e5-8b4826a43153\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5076079801d58a08432af79949fb03da7b9445180a950bdf1310638edfa8b95d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43e360aa041ebc4c926c01dc20eb1920c9e121b239f76c6f312d4928019f38c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38ff097f5dbab0def600436177a09f212347b705be6b75949d9646a79ab4e2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c12fe713892fcaac8c399250285f84c3398d79fad5f2b44ae5c82415179b3f8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.310465 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.310489 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.310498 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.310513 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.310528 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:34Z","lastTransitionTime":"2026-01-21T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.319436 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b352eabeb8605aef39f432a4a17981ad63eb0c1c9a72a978e8ffe12c922eabec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.335249 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b42039ccf4830358f9e7ed38f30e58eb045ff2d7146b2b2a4a0a624ead67a57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.347462 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-85gfq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4e77c27-c6f5-4b6b-a8fd-4595ea0a58ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f3161bdc55f3a6c8adf41491f137f18e54c9bf156a9d3c1752a97009c01357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tscm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-85gfq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.361357 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"482472d7-f466-4155-8743-5469a2d218cc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-21T17:33:20Z\\\",\\\"message\\\":\\\"file observer\\\\nW0121 17:33:20.507031 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0121 17:33:20.507419 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0121 17:33:20.509818 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-816400208/tls.crt::/tmp/serving-cert-816400208/tls.key\\\\\\\"\\\\nI0121 17:33:20.775494 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0121 17:33:20.777854 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0121 17:33:20.777873 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0121 17:33:20.777940 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0121 17:33:20.777953 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0121 17:33:20.783397 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0121 17:33:20.783417 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783422 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0121 17:33:20.783426 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0121 17:33:20.783429 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0121 17:33:20.783432 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0121 17:33:20.783435 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0121 17:33:20.783613 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0121 17:33:20.788589 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.374875 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.384804 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hpm7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4f19e394-f753-4802-a65b-a2d461af624b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6eb21f47c39826dcabf0c326ade89d5a24f2499ea516f9b4fa80f332df21c818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-th2t8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hpm7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.406093 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-bckxf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe826811-0f15-453a-9849-dae49637b629\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3f4fd7659f6461d9f8dbdd0ad0be27b623bfeadd1538a4125824bd83505074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://017e4e43bdd98d3b9f192b9036e74be931854e9907d59ce75cad540b2ebd7b49\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4cf8d6fb7123991c81fb4c1add07957e1d80d97b746a9963db53dc26cb2f192b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc28e92b434db7f80f9452eb74e6449c65b85a8f6a3d45faf276ad64a69fe610\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c3658495efc9a17327e74c43a981f4ab9631af91437f3d6d53ca9feb3fba92\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c1123d4eaac9a1e4073b984489927023d5915700e88958ec30ff3915db7009\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f97a6cf08cb86ab4a6c79747b34fd9d930fff254f756e4ace8e0694857c7acd8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ljvgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:25Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-bckxf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.413184 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.413277 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:34 crc 
kubenswrapper[4799]: I0121 17:34:34.413297 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.413688 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.413940 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:34Z","lastTransitionTime":"2026-01-21T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.424152 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"194d1f30-7188-434a-8ff1-712bb40a4bdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25a66541211bb554bfad4776b1852a019ba7bbaee5a1a7dcecd8b5da46409b82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://84906e6714a9a500a9e9027fde9832d0d820c15b5bd986285965cf8022855cea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d6327d0b4c154129bf0be5f74ed0b7751a138ac852079cddfba26d8b0f0bf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.438651 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5108a8b3-b201-4c5c-bb12-038eeb9b4a61\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bc3cd5709df489fdb1fe0890f905c648166df1a65093ecd00f15052c59e64e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe229c5bd53b1207c5b1dd29eba6ffa1ebcb82e00a7d07de733e375527f7d2af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe229c5bd53b1207c5b1dd29eba6ffa1ebcb82e00a7d07de733e375527f7d2af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.453556 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:25Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.467765 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"717a7f1f-de20-4d1a-a943-0aef95ea6b45\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8154e9b4882c9b8937fa7e53026dc392aea0c69041f3e9a6cc305e0a60116fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a990cc2385975e0981e97693c0e01c1d609d2cc0a15d2dd8ea2348c79c58abd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95xwd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-6rb85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 
17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.490468 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af6dd02d-892e-4455-8617-d19f2b2e093e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-21T17:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c90a67efcfae35a96cdfc2a14d3150b8872e5e46ff28468d5d1910d34041b6dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://356e8e169444fda17290b5c8c7b6f741b075a104e1a7a247411bbcfb20b0ef4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98274f97e4084169a2703e4c4eba552ea91868bfde19278d9365ce50d7a8cff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d36b3ce4ad807a78bd8e7ffaa8e43beabdecde4f1e8b537d5b8e55c9cf05f67b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c075cb965f6955fc583d6acbfd34a5746a92b6f2f30f1ddfdbc59b271050ad7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:32:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64e8d6ff18f53190b7b6a2b39949496833ba1d6551612c97959d4e55e86abdb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64e8d6ff18f53190b7b6a2b39949496833ba1d6551612c97959d4e55e86abdb0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85585b0d402e98e6f91410bef0666ba47bc3b9138153aeda2023decfa1c8a641\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85585b0d402e98e6f91410bef0666ba47bc3b9138153aeda2023decfa1c8a641\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:56Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9f3f8f40740daa57712389592e60faf5fa8e5433c827fe0fa540e4f19f75244d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f3f8f40740daa57712389592e60faf5fa8e5433c827fe0fa540e4f19f75244d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-21T17:32:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-21T17:32:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-21T17:32:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.505208 4799 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-21T17:33:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0d36fd5f4968ee7536d0025e2ef3c5595c26541a8e96da3ea1ffa5d104a9df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86e7aed30d9b344092881c09d66f3436de0ad44b47946fea6be9de285ebedc91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-21T17:33:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:34Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.516553 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.516614 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.516627 4799 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.516649 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.516664 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:34Z","lastTransitionTime":"2026-01-21T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.620659 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.620711 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.620742 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.620761 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.620776 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:34Z","lastTransitionTime":"2026-01-21T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.723703 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.723757 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.723769 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.723789 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.723802 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:34Z","lastTransitionTime":"2026-01-21T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.827332 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.827423 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.827484 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.827520 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.827544 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:34Z","lastTransitionTime":"2026-01-21T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.927501 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 15:21:41.645506703 +0000 UTC Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.931211 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.931287 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.931401 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.931440 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:34 crc kubenswrapper[4799]: I0121 17:34:34.931464 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:34Z","lastTransitionTime":"2026-01-21T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.038505 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.038593 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.038618 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.038650 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.038673 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:35Z","lastTransitionTime":"2026-01-21T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.142149 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.142206 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.142221 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.142242 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.142256 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:35Z","lastTransitionTime":"2026-01-21T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.204119 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.204210 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.204184 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.204497 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:35 crc kubenswrapper[4799]: E0121 17:34:35.204642 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:35 crc kubenswrapper[4799]: E0121 17:34:35.204808 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:35 crc kubenswrapper[4799]: E0121 17:34:35.205000 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:35 crc kubenswrapper[4799]: E0121 17:34:35.205352 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.245491 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.245542 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.245556 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.245576 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.245588 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:35Z","lastTransitionTime":"2026-01-21T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.348209 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.348299 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.348315 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.348341 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.348358 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:35Z","lastTransitionTime":"2026-01-21T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.451398 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.451463 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.451472 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.451492 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.451504 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:35Z","lastTransitionTime":"2026-01-21T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.554419 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.554468 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.554481 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.554500 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.554513 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:35Z","lastTransitionTime":"2026-01-21T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.658230 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.658316 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.658353 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.658385 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.658408 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:35Z","lastTransitionTime":"2026-01-21T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.762232 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.762294 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.762303 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.762321 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.762333 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:35Z","lastTransitionTime":"2026-01-21T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.864675 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.864712 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.864720 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.864736 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.864747 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:35Z","lastTransitionTime":"2026-01-21T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.927934 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 16:56:56.97198001 +0000 UTC Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.968111 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.968200 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.968216 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.968241 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:35 crc kubenswrapper[4799]: I0121 17:34:35.968254 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:35Z","lastTransitionTime":"2026-01-21T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.071625 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.071676 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.071690 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.071711 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.071727 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:36Z","lastTransitionTime":"2026-01-21T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.174294 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.174338 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.174356 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.174375 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.174387 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:36Z","lastTransitionTime":"2026-01-21T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.277488 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.277560 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.277576 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.277597 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.277614 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:36Z","lastTransitionTime":"2026-01-21T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.380954 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.381010 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.381022 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.381045 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.381061 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:36Z","lastTransitionTime":"2026-01-21T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.484454 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.484515 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.484524 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.484544 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.484555 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:36Z","lastTransitionTime":"2026-01-21T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.588341 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.588411 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.588513 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.588545 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.588570 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:36Z","lastTransitionTime":"2026-01-21T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.691083 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.691149 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.691163 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.691181 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.691192 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:36Z","lastTransitionTime":"2026-01-21T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.795220 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.795266 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.795278 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.795301 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.795319 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:36Z","lastTransitionTime":"2026-01-21T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.899739 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.899810 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.899824 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.899852 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.899866 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:36Z","lastTransitionTime":"2026-01-21T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:36 crc kubenswrapper[4799]: I0121 17:34:36.928559 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 03:40:41.570172975 +0000 UTC Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.003091 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.003148 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.003161 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.003180 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.003194 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:37Z","lastTransitionTime":"2026-01-21T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.106019 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.106077 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.106090 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.106110 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.106144 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:37Z","lastTransitionTime":"2026-01-21T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.204260 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.204359 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:37 crc kubenswrapper[4799]: E0121 17:34:37.204478 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:37 crc kubenswrapper[4799]: E0121 17:34:37.204569 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.204655 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.204644 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:37 crc kubenswrapper[4799]: E0121 17:34:37.205196 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:37 crc kubenswrapper[4799]: E0121 17:34:37.205309 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.205564 4799 scope.go:117] "RemoveContainer" containerID="4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258" Jan 21 17:34:37 crc kubenswrapper[4799]: E0121 17:34:37.205770 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.209343 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.209389 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.209401 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.209419 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.209432 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:37Z","lastTransitionTime":"2026-01-21T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.313599 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.313659 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.313668 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.313690 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.313702 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:37Z","lastTransitionTime":"2026-01-21T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.417758 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.417832 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.417847 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.417876 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.417890 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:37Z","lastTransitionTime":"2026-01-21T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.521604 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.521672 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.521743 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.521777 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.521796 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:37Z","lastTransitionTime":"2026-01-21T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.625923 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.625980 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.625990 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.626015 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.626029 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:37Z","lastTransitionTime":"2026-01-21T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.728621 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.728682 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.728700 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.728729 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.728748 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:37Z","lastTransitionTime":"2026-01-21T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.832116 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.832223 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.832247 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.832308 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.832333 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:37Z","lastTransitionTime":"2026-01-21T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.929686 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 20:43:12.7709426 +0000 UTC Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.935463 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.935499 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.935509 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.935525 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:37 crc kubenswrapper[4799]: I0121 17:34:37.935536 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:37Z","lastTransitionTime":"2026-01-21T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.038586 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.038655 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.038673 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.038702 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.038722 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:38Z","lastTransitionTime":"2026-01-21T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.141829 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.141944 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.141970 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.142054 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.142119 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:38Z","lastTransitionTime":"2026-01-21T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.318702 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.318753 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.318767 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.318788 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.318801 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:38Z","lastTransitionTime":"2026-01-21T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.421637 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.421680 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.421692 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.421712 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.421726 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:38Z","lastTransitionTime":"2026-01-21T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.524775 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.524851 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.524866 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.524889 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.524906 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:38Z","lastTransitionTime":"2026-01-21T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.627797 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.627878 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.627914 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.627946 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.627968 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:38Z","lastTransitionTime":"2026-01-21T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.731072 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.731185 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.731202 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.731230 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.731250 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:38Z","lastTransitionTime":"2026-01-21T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.834024 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.834095 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.834114 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.834160 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.834176 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:38Z","lastTransitionTime":"2026-01-21T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.929991 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 00:40:00.466261477 +0000 UTC Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.937684 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.937743 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.937765 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.937798 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:38 crc kubenswrapper[4799]: I0121 17:34:38.937823 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:38Z","lastTransitionTime":"2026-01-21T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.041487 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.041533 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.041544 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.041561 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.041575 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:39Z","lastTransitionTime":"2026-01-21T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.144681 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.144764 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.144775 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.144798 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.144812 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:39Z","lastTransitionTime":"2026-01-21T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.204586 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.204741 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:39 crc kubenswrapper[4799]: E0121 17:34:39.204777 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.204588 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.204620 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:39 crc kubenswrapper[4799]: E0121 17:34:39.204912 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:39 crc kubenswrapper[4799]: E0121 17:34:39.204969 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:39 crc kubenswrapper[4799]: E0121 17:34:39.205017 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.248600 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.248664 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.248682 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.248715 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.248732 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:39Z","lastTransitionTime":"2026-01-21T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
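[Note] The condition object repeated in the setters.go entries is the node's Ready condition exactly as the kubelet patches it onto the Node object; only the heartbeat and transition timestamps advance between entries. A self-contained sketch that reproduces the logged shape with a local mirror of the NodeCondition type (field names taken from the log output above; the truncated message is illustrative):

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// nodeCondition mirrors the fields of the Kubernetes NodeCondition
// as they appear serialized in the entries above; it is a local
// stand-in, not the upstream API type.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	now := time.Now().UTC().Format(time.RFC3339)
	c := nodeCondition{
		Type:               "Ready",
		Status:             "False",
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		// Message shortened here; the log carries the full CNI error text.
		Message: "container runtime network not ready: NetworkReady=false ...",
	}
	b, _ := json.Marshal(c)
	fmt.Println("condition=" + string(b))
}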
Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.353689 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.353721 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.353730 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.353746 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.353756 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:39Z","lastTransitionTime":"2026-01-21T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.457121 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.457247 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.457266 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.457295 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.457315 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:39Z","lastTransitionTime":"2026-01-21T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.560780 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.560837 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.560847 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.560867 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.560880 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:39Z","lastTransitionTime":"2026-01-21T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.663556 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.663642 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.663658 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.663682 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.663693 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:39Z","lastTransitionTime":"2026-01-21T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.766505 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.766557 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.766568 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.766585 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.766596 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:39Z","lastTransitionTime":"2026-01-21T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.869555 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.869596 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.869608 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.869623 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.869642 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:39Z","lastTransitionTime":"2026-01-21T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.930878 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 11:42:00.717493975 +0000 UTC Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.973218 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.973283 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.973298 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.973325 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.973339 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:39Z","lastTransitionTime":"2026-01-21T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.981023 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.981091 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.981398 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.981435 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:39 crc kubenswrapper[4799]: I0121 17:34:39.981461 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:39Z","lastTransitionTime":"2026-01-21T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:39 crc kubenswrapper[4799]: E0121 17:34:39.997773 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:39Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.003395 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.003450 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.003464 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.003484 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.004831 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:40Z","lastTransitionTime":"2026-01-21T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:40 crc kubenswrapper[4799]: E0121 17:34:40.020556 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.025642 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.025692 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.025704 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.025724 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.025734 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:40Z","lastTransitionTime":"2026-01-21T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:40 crc kubenswrapper[4799]: E0121 17:34:40.039389 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.044628 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.044680 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.044693 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.044715 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.044727 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:40Z","lastTransitionTime":"2026-01-21T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:40 crc kubenswrapper[4799]: E0121 17:34:40.059776 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.065562 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.065618 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.065628 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.065696 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.065711 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:40Z","lastTransitionTime":"2026-01-21T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:40 crc kubenswrapper[4799]: E0121 17:34:40.080713 4799 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-21T17:34:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"933b271b-0519-400b-9412-6730db28e758\\\",\\\"systemUUID\\\":\\\"789ad1a6-8ab1-48c5-ae81-5ae7be5b1f10\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:40Z is after 2025-08-24T17:21:41Z" Jan 21 17:34:40 crc kubenswrapper[4799]: E0121 17:34:40.080859 4799 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.082973 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.083016 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.083028 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.083082 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.083094 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:40Z","lastTransitionTime":"2026-01-21T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.186072 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.186156 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.186171 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.186188 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.186199 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:40Z","lastTransitionTime":"2026-01-21T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.289703 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.289779 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.289794 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.289814 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
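The webhook failure logged above ("tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-21T17:34:40Z is after 2025-08-24T17:21:41Z") is the standard Go x509 validity-window check: verification fails whenever the wall clock falls outside the certificate's [NotBefore, NotAfter] interval. A minimal, self-contained Go sketch of that check follows; the dates come from the log line, while checkValidityWindow and the assumed NotBefore are illustrative, not kubelet code.

package main

import (
	"crypto/x509"
	"fmt"
	"time"
)

// checkValidityWindow mirrors the time comparison that x509 chain
// verification performs before any signature checking.
func checkValidityWindow(cert *x509.Certificate, now time.Time) error {
	if now.Before(cert.NotBefore) {
		return fmt.Errorf("certificate is not yet valid: current time %s is before %s",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	}
	if now.After(cert.NotAfter) {
		return fmt.Errorf("certificate has expired: current time %s is after %s",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	}
	return nil
}

func main() {
	// NotAfter is taken from the log; NotBefore is an assumed issue date.
	cert := &x509.Certificate{
		NotBefore: time.Date(2023, 8, 24, 17, 21, 41, 0, time.UTC),
		NotAfter:  time.Date(2025, 8, 24, 17, 21, 41, 0, time.UTC),
	}
	now := time.Date(2026, 1, 21, 17, 34, 40, 0, time.UTC)
	fmt.Println(checkValidityWindow(cert, now)) // prints the "has expired" error
}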
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.289850 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:40Z","lastTransitionTime":"2026-01-21T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.393437 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.393489 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.393501 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.393521 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.393534 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:40Z","lastTransitionTime":"2026-01-21T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.496892 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.496967 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.496979 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.497000 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.497013 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:40Z","lastTransitionTime":"2026-01-21T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.600794 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.600864 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.600882 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.600912 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
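Each setters.go:603 line above embeds the node's Ready condition as JSON. A small stdlib-only Go sketch that reproduces that payload shape follows; the local nodeCondition struct is a hand-rolled stand-in for Kubernetes' v1.NodeCondition, defined here only to keep the example self-contained.

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// nodeCondition mirrors the shape of the condition={...} payload logged
// above (a subset of the fields of Kubernetes' v1.NodeCondition).
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	now := time.Date(2026, 1, 21, 17, 34, 40, 0, time.UTC).Format(time.RFC3339)
	cond := nodeCondition{
		Type:               "Ready",
		Status:             "False",
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?",
	}
	b, _ := json.Marshal(cond)
	fmt.Println(string(b)) // same JSON shape as the setters.go:603 lines
}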
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.600934 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:40Z","lastTransitionTime":"2026-01-21T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.703673 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.703727 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.703738 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.703756 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.703768 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:40Z","lastTransitionTime":"2026-01-21T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.807072 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.807147 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.807164 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.807184 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.807198 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:40Z","lastTransitionTime":"2026-01-21T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.910000 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.910055 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.910066 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.910081 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.910092 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:40Z","lastTransitionTime":"2026-01-21T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:40 crc kubenswrapper[4799]: I0121 17:34:40.931328 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 13:33:19.734370869 +0000 UTC
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.012790 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.012842 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.012857 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.012876 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.012887 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:41Z","lastTransitionTime":"2026-01-21T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.116638 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.116707 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.116718 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.116735 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.116748 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:41Z","lastTransitionTime":"2026-01-21T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.204885 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.204945 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
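The certificate_manager.go:356 lines report a different rotation deadline on every tick. In client-go's certificate manager the deadline is drawn at a jittered point of the certificate's validity period, roughly the 70 to 90 percent span, so each evaluation logs a new value; the deadlines in this log (mid-November through late December 2025) are consistent with that window for a certificate expiring 2026-02-24. A sketch of that idea follows, assuming a one-year validity window (an assumption; the NotBefore date is not in the log).

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random point in roughly the 70-90% span of the
// certificate's lifetime, in the spirit of client-go's certificate manager.
// Each call draws fresh jitter, which is why each tick logs a new deadline.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	// Validity window consistent with the logged expiration of 2026-02-24;
	// the one-year lifetime is assumed for illustration.
	notBefore := time.Date(2025, 2, 24, 5, 53, 3, 0, time.UTC)
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)
	for i := 0; i < 3; i++ {
		fmt.Println("rotation deadline is", rotationDeadline(notBefore, notAfter))
	}
}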
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.204900 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 21 17:34:41 crc kubenswrapper[4799]: E0121 17:34:41.205084 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3"
Jan 21 17:34:41 crc kubenswrapper[4799]: E0121 17:34:41.205214 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.205279 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 21 17:34:41 crc kubenswrapper[4799]: E0121 17:34:41.205347 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 21 17:34:41 crc kubenswrapper[4799]: E0121 17:34:41.205568 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.220254 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.220301 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.220317 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.220334 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
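The "No sandbox for pod can be found" and "Error syncing pod, skipping" entries above all trace back to the same condition: no network config exists under /etc/kubernetes/cni/net.d/. A rough, stdlib-only Go analogue of that readiness probe follows; hasCNIConfig is a hypothetical helper written for this sketch, not the actual kubelet or CRI-O code.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether any CNI network config file exists in dir,
// a rough analogue of the check behind the NetworkPluginNotReady message.
func hasCNIConfig(dir string) (bool, error) {
	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(dir, pattern))
		if err != nil {
			return false, err
		}
		if len(matches) > 0 {
			return true, nil
		}
	}
	return false, nil
}

func main() {
	dir := "/etc/kubernetes/cni/net.d"
	ok, err := hasCNIConfig(dir)
	if err != nil || !ok {
		fmt.Printf("no CNI configuration file in %s. Has your network provider started? (err=%v)\n", dir, err)
		os.Exit(1)
	}
	fmt.Println("CNI configuration present in", dir)
}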
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.220346 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:41Z","lastTransitionTime":"2026-01-21T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.322933 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.322987 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.322999 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.323018 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.323029 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:41Z","lastTransitionTime":"2026-01-21T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.426316 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.426355 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.426366 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.426382 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.426393 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:41Z","lastTransitionTime":"2026-01-21T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.529444 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.529486 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.529496 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.529542 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.529557 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:41Z","lastTransitionTime":"2026-01-21T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.567780 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs\") pod \"network-metrics-daemon-7q999\" (UID: \"7796adba-b973-44ee-b0c4-c0df544250e3\") " pod="openshift-multus/network-metrics-daemon-7q999"
Jan 21 17:34:41 crc kubenswrapper[4799]: E0121 17:34:41.567983 4799 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 21 17:34:41 crc kubenswrapper[4799]: E0121 17:34:41.568117 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs podName:7796adba-b973-44ee-b0c4-c0df544250e3 nodeName:}" failed. No retries permitted until 2026-01-21 17:35:45.568087715 +0000 UTC m=+172.194377728 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs") pod "network-metrics-daemon-7q999" (UID: "7796adba-b973-44ee-b0c4-c0df544250e3") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.632590 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.632692 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.632708 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.632732 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
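The nestedpendingoperations.go:348 entry above shows the volume manager's exponential backoff: durationBeforeRetry 1m4s is 500ms doubled seven times, and the next retry is scheduled a full backoff interval ahead (17:35:45). A sketch of that doubling follows; the 500ms initial value matches the observed sequence, while the cap of about two minutes is assumed for illustration rather than taken from this log.

package main

import (
	"fmt"
	"time"
)

// Exponential backoff in the spirit of the kubelet's volume retry logic:
// the wait doubles from an initial 500ms up to a cap, so a mount that has
// failed repeatedly ends up logging durationBeforeRetry 1m4s (= 64s).
const (
	initialDurationBeforeRetry = 500 * time.Millisecond
	maxDurationBeforeRetry     = 2*time.Minute + 2*time.Second // assumed cap
)

func nextBackoff(current time.Duration) time.Duration {
	if current == 0 {
		return initialDurationBeforeRetry
	}
	next := 2 * current
	if next > maxDurationBeforeRetry {
		return maxDurationBeforeRetry
	}
	return next
}

func main() {
	d := time.Duration(0)
	for i := 0; i < 10; i++ {
		d = nextBackoff(d)
		fmt.Printf("attempt %d: no retries permitted for %s\n", i+1, d)
	}
}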
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.632748 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:41Z","lastTransitionTime":"2026-01-21T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.736327 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.736411 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.736423 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.736446 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.736457 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:41Z","lastTransitionTime":"2026-01-21T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.839537 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.839594 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.839605 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.839632 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.839644 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:41Z","lastTransitionTime":"2026-01-21T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.931893 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 12:40:04.625235841 +0000 UTC Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.941805 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.941897 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.941907 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.941923 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:41 crc kubenswrapper[4799]: I0121 17:34:41.941936 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:41Z","lastTransitionTime":"2026-01-21T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.044768 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.044812 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.044821 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.044843 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.044855 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:42Z","lastTransitionTime":"2026-01-21T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.147832 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.147885 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.147896 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.147921 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.147937 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:42Z","lastTransitionTime":"2026-01-21T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.251221 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.251266 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.251277 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.251302 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.251321 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:42Z","lastTransitionTime":"2026-01-21T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.354220 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.354288 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.354304 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.354324 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.354337 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:42Z","lastTransitionTime":"2026-01-21T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.457972 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.458022 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.458035 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.458058 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.458071 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:42Z","lastTransitionTime":"2026-01-21T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.561244 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.561322 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.561332 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.561350 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.561363 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:42Z","lastTransitionTime":"2026-01-21T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.664973 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.665033 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.665043 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.665064 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.665077 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:42Z","lastTransitionTime":"2026-01-21T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.768969 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.769070 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.769080 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.769109 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.769121 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:42Z","lastTransitionTime":"2026-01-21T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.872825 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.872881 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.872890 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.872907 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.872917 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:42Z","lastTransitionTime":"2026-01-21T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.932155 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 13:25:49.688048215 +0000 UTC Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.975444 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.975481 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.975489 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.975504 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:42 crc kubenswrapper[4799]: I0121 17:34:42.975517 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:42Z","lastTransitionTime":"2026-01-21T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.078983 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.079046 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.079056 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.079077 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.079088 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:43Z","lastTransitionTime":"2026-01-21T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.182486 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.182542 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.182559 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.182581 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.182593 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:43Z","lastTransitionTime":"2026-01-21T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.205108 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.205220 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.205353 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:43 crc kubenswrapper[4799]: E0121 17:34:43.205459 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.205531 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:43 crc kubenswrapper[4799]: E0121 17:34:43.205740 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:43 crc kubenswrapper[4799]: E0121 17:34:43.205741 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:43 crc kubenswrapper[4799]: E0121 17:34:43.205805 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.286310 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.286363 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.286375 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.286393 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.286406 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:43Z","lastTransitionTime":"2026-01-21T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.388586 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.388624 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.388632 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.388679 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.388690 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:43Z","lastTransitionTime":"2026-01-21T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.493400 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.493516 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.493525 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.493549 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.493567 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:43Z","lastTransitionTime":"2026-01-21T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.596605 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.596650 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.596661 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.596680 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.596693 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:43Z","lastTransitionTime":"2026-01-21T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.699516 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.699552 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.699566 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.699585 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.699600 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:43Z","lastTransitionTime":"2026-01-21T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.803696 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.803770 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.803789 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.803818 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.803837 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:43Z","lastTransitionTime":"2026-01-21T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.907781 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.907858 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.907885 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.907920 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.907945 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:43Z","lastTransitionTime":"2026-01-21T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:43 crc kubenswrapper[4799]: I0121 17:34:43.932879 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 22:16:33.531523305 +0000 UTC Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.013339 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.013384 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.013394 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.013413 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.013425 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:44Z","lastTransitionTime":"2026-01-21T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.117061 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.117117 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.117161 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.117180 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.117192 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:44Z","lastTransitionTime":"2026-01-21T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.219191 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.219248 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.219261 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.219281 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.219294 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:44Z","lastTransitionTime":"2026-01-21T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.255983 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=27.255927441 podStartE2EDuration="27.255927441s" podCreationTimestamp="2026-01-21 17:34:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:34:44.255721375 +0000 UTC m=+110.882011418" watchObservedRunningTime="2026-01-21 17:34:44.255927441 +0000 UTC m=+110.882217474"
Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.313544 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-6rb85" podStartSLOduration=82.313518168 podStartE2EDuration="1m22.313518168s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:34:44.312479837 +0000 UTC m=+110.938769880" watchObservedRunningTime="2026-01-21 17:34:44.313518168 +0000 UTC m=+110.939808201"
Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.321607 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.321665 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.321678 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.321707 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
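The pod_startup_latency_tracker.go:104 entries report two figures: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration appears to be the same span with image-pull time excluded; with the zero firstStartedPulling/lastFinishedPulling values logged here the two coincide (about 27.25s for etcd-crc). A rough reconstruction follows; the startupDurations helper is illustrative, not the tracker's actual code.

package main

import (
	"fmt"
	"time"
)

// startupDurations reconstructs the two logged figures: e2e is the full
// creation-to-running span, and slo additionally subtracts the image-pull
// window when pull timestamps are present (they are zero in this log).
func startupDurations(created, firstPull, lastPull, running time.Time) (slo, e2e time.Duration) {
	e2e = running.Sub(created)
	slo = e2e
	if !firstPull.IsZero() && !lastPull.IsZero() {
		slo -= lastPull.Sub(firstPull)
	}
	return slo, e2e
}

func main() {
	// Values approximating the etcd-crc entry above.
	created := time.Date(2026, 1, 21, 17, 34, 17, 0, time.UTC)
	running := time.Date(2026, 1, 21, 17, 34, 44, 255721375, time.UTC)
	slo, e2e := startupDurations(created, time.Time{}, time.Time{}, running)
	fmt.Printf("podStartSLOduration=%s podStartE2EDuration=%s\n", slo, e2e)
}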
Has your network provider started?"} Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.363419 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=57.363396408 podStartE2EDuration="57.363396408s" podCreationTimestamp="2026-01-21 17:33:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:34:44.3478721 +0000 UTC m=+110.974162123" watchObservedRunningTime="2026-01-21 17:34:44.363396408 +0000 UTC m=+110.989686431" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.382386 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-sl7lv" podStartSLOduration=83.382357326 podStartE2EDuration="1m23.382357326s" podCreationTimestamp="2026-01-21 17:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:34:44.381732578 +0000 UTC m=+111.008022611" watchObservedRunningTime="2026-01-21 17:34:44.382357326 +0000 UTC m=+111.008647349" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.524950 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.524999 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.525009 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.525039 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.525053 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:44Z","lastTransitionTime":"2026-01-21T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.549951 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podStartSLOduration=83.549920037 podStartE2EDuration="1m23.549920037s" podCreationTimestamp="2026-01-21 17:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:34:44.549814524 +0000 UTC m=+111.176104547" watchObservedRunningTime="2026-01-21 17:34:44.549920037 +0000 UTC m=+111.176210060" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.604033 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=75.603986299 podStartE2EDuration="1m15.603986299s" podCreationTimestamp="2026-01-21 17:33:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:34:44.603639099 +0000 UTC m=+111.229929132" watchObservedRunningTime="2026-01-21 17:34:44.603986299 +0000 UTC m=+111.230276322" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.627247 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.627276 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.627286 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.627319 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.627330 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:44Z","lastTransitionTime":"2026-01-21T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.680448 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-85gfq" podStartSLOduration=83.68042649 podStartE2EDuration="1m23.68042649s" podCreationTimestamp="2026-01-21 17:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:34:44.657665809 +0000 UTC m=+111.283955832" watchObservedRunningTime="2026-01-21 17:34:44.68042649 +0000 UTC m=+111.306716503" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.680824 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=78.680818671 podStartE2EDuration="1m18.680818671s" podCreationTimestamp="2026-01-21 17:33:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:34:44.680082709 +0000 UTC m=+111.306372732" watchObservedRunningTime="2026-01-21 17:34:44.680818671 +0000 UTC m=+111.307108694" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.706324 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=23.706301101 podStartE2EDuration="23.706301101s" podCreationTimestamp="2026-01-21 17:34:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:34:44.693744312 +0000 UTC m=+111.320034335" watchObservedRunningTime="2026-01-21 17:34:44.706301101 +0000 UTC m=+111.332591124" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.707266 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-hpm7v" podStartSLOduration=83.70725862 podStartE2EDuration="1m23.70725862s" podCreationTimestamp="2026-01-21 17:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:34:44.705629562 +0000 UTC m=+111.331919585" watchObservedRunningTime="2026-01-21 17:34:44.70725862 +0000 UTC m=+111.333548643" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.725786 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-bckxf" podStartSLOduration=82.725765995 podStartE2EDuration="1m22.725765995s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:34:44.724476377 +0000 UTC m=+111.350766410" watchObservedRunningTime="2026-01-21 17:34:44.725765995 +0000 UTC m=+111.352056018" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.729973 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.730005 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.730015 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.730029 4799 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.730038 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:44Z","lastTransitionTime":"2026-01-21T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.832604 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.832659 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.832669 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.832687 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.832699 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:44Z","lastTransitionTime":"2026-01-21T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.933659 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 00:56:06.925816848 +0000 UTC Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.935337 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.935363 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.935371 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.935388 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:44 crc kubenswrapper[4799]: I0121 17:34:44.935399 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:44Z","lastTransitionTime":"2026-01-21T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.038207 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.038255 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.038264 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.038282 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.038291 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:45Z","lastTransitionTime":"2026-01-21T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.141756 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.142444 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.142630 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.142707 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.142785 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:45Z","lastTransitionTime":"2026-01-21T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.204392 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.204395 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:45 crc kubenswrapper[4799]: E0121 17:34:45.204591 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.204429 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:45 crc kubenswrapper[4799]: E0121 17:34:45.204680 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.204417 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:45 crc kubenswrapper[4799]: E0121 17:34:45.204719 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:45 crc kubenswrapper[4799]: E0121 17:34:45.204764 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.246233 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.246278 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.246291 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.246309 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.246324 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:45Z","lastTransitionTime":"2026-01-21T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.348823 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.348919 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.348932 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.348957 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.348973 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:45Z","lastTransitionTime":"2026-01-21T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.451637 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.451674 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.451683 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.451700 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.451711 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:45Z","lastTransitionTime":"2026-01-21T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.555187 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.555549 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.555640 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.555744 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.555814 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:45Z","lastTransitionTime":"2026-01-21T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.658684 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.659049 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.659120 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.659256 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.659330 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:45Z","lastTransitionTime":"2026-01-21T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.762930 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.762977 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.762998 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.763023 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.763038 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:45Z","lastTransitionTime":"2026-01-21T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.866050 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.866099 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.866109 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.866156 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.866169 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:45Z","lastTransitionTime":"2026-01-21T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.934912 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 11:27:07.628306396 +0000 UTC Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.968105 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.968154 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.968163 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.968180 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:45 crc kubenswrapper[4799]: I0121 17:34:45.968190 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:45Z","lastTransitionTime":"2026-01-21T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.070550 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.070599 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.070609 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.070626 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.070637 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:46Z","lastTransitionTime":"2026-01-21T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.174218 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.174266 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.174276 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.174293 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.174303 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:46Z","lastTransitionTime":"2026-01-21T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.277021 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.277066 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.277077 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.277101 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.277111 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:46Z","lastTransitionTime":"2026-01-21T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.379379 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.379428 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.379446 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.379470 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.379484 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:46Z","lastTransitionTime":"2026-01-21T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.482438 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.482482 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.482503 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.482528 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.482540 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:46Z","lastTransitionTime":"2026-01-21T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.585897 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.585947 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.585957 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.585975 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.585988 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:46Z","lastTransitionTime":"2026-01-21T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.688012 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.688055 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.688063 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.688078 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.688088 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:46Z","lastTransitionTime":"2026-01-21T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.790538 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.790589 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.790602 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.790622 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.790638 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:46Z","lastTransitionTime":"2026-01-21T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.893828 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.893879 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.893891 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.893912 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.893926 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:46Z","lastTransitionTime":"2026-01-21T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.936102 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 22:08:50.706339153 +0000 UTC Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.996425 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.996472 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.996483 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.996500 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:46 crc kubenswrapper[4799]: I0121 17:34:46.996512 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:46Z","lastTransitionTime":"2026-01-21T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.099457 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.099503 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.099514 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.099532 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.099543 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:47Z","lastTransitionTime":"2026-01-21T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.202270 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.202316 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.202327 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.202346 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.202358 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:47Z","lastTransitionTime":"2026-01-21T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.204632 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.204628 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:47 crc kubenswrapper[4799]: E0121 17:34:47.204739 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.204635 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.204841 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:47 crc kubenswrapper[4799]: E0121 17:34:47.204918 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:47 crc kubenswrapper[4799]: E0121 17:34:47.205070 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:47 crc kubenswrapper[4799]: E0121 17:34:47.205183 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.373572 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.373668 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.373692 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.373761 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.373794 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:47Z","lastTransitionTime":"2026-01-21T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.476786 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.476826 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.476836 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.476853 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.476865 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:47Z","lastTransitionTime":"2026-01-21T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.579917 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.580458 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.580643 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.580808 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.580962 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:47Z","lastTransitionTime":"2026-01-21T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.684084 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.684163 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.684182 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.684210 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.684223 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:47Z","lastTransitionTime":"2026-01-21T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.787299 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.787344 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.787356 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.787375 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.787390 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:47Z","lastTransitionTime":"2026-01-21T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.890999 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.891081 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.891114 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.891189 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.891208 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:47Z","lastTransitionTime":"2026-01-21T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.936549 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 17:31:04.929192754 +0000 UTC Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.994213 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.994287 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.994298 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.994319 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:47 crc kubenswrapper[4799]: I0121 17:34:47.994330 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:47Z","lastTransitionTime":"2026-01-21T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.097593 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.097636 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.097646 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.097665 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.097678 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:48Z","lastTransitionTime":"2026-01-21T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.200477 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.200532 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.200547 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.200571 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.200587 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:48Z","lastTransitionTime":"2026-01-21T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.304167 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.304253 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.304268 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.304287 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.304299 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:48Z","lastTransitionTime":"2026-01-21T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.406902 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.406941 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.406953 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.406972 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.406984 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:48Z","lastTransitionTime":"2026-01-21T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.510011 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.510057 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.510067 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.510086 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.510104 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:48Z","lastTransitionTime":"2026-01-21T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.612951 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.613050 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.613064 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.613086 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.613103 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:48Z","lastTransitionTime":"2026-01-21T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.717162 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.717219 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.717229 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.717251 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.717266 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:48Z","lastTransitionTime":"2026-01-21T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.820103 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.820192 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.820208 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.820230 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.820244 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:48Z","lastTransitionTime":"2026-01-21T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.923748 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.923797 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.923808 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.923828 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.923841 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:48Z","lastTransitionTime":"2026-01-21T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:48 crc kubenswrapper[4799]: I0121 17:34:48.937018 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 09:37:58.894658771 +0000 UTC Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.027605 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.027677 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.027691 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.027714 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.027728 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:49Z","lastTransitionTime":"2026-01-21T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.131433 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.131478 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.131488 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.131507 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.131518 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:49Z","lastTransitionTime":"2026-01-21T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.205018 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.205064 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.205261 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:49 crc kubenswrapper[4799]: E0121 17:34:49.205381 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.205403 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:49 crc kubenswrapper[4799]: E0121 17:34:49.205554 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:49 crc kubenswrapper[4799]: E0121 17:34:49.205667 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:49 crc kubenswrapper[4799]: E0121 17:34:49.205818 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.234812 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.234868 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.234882 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.234903 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.234919 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:49Z","lastTransitionTime":"2026-01-21T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.337945 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.338003 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.338014 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.338035 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.338046 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:49Z","lastTransitionTime":"2026-01-21T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.441392 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.441444 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.441453 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.441471 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.441481 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:49Z","lastTransitionTime":"2026-01-21T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.544072 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.544107 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.544142 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.544163 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.544173 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:49Z","lastTransitionTime":"2026-01-21T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.647306 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.647351 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.647363 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.647382 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.647393 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:49Z","lastTransitionTime":"2026-01-21T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.751044 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.751112 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.751161 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.751184 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.751201 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:49Z","lastTransitionTime":"2026-01-21T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.854581 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.854636 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.854648 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.854671 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.854686 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:49Z","lastTransitionTime":"2026-01-21T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.937487 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 00:54:53.136734251 +0000 UTC Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.958687 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.958747 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.958762 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.958782 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:49 crc kubenswrapper[4799]: I0121 17:34:49.958792 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:49Z","lastTransitionTime":"2026-01-21T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.061528 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.061588 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.061600 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.061621 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.061634 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:50Z","lastTransitionTime":"2026-01-21T17:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.165028 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.165080 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.165089 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.165107 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.165120 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:50Z","lastTransitionTime":"2026-01-21T17:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.271981 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.272051 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.272065 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.272115 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.272322 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:50Z","lastTransitionTime":"2026-01-21T17:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.375879 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.375920 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.375930 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.375946 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.375956 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:50Z","lastTransitionTime":"2026-01-21T17:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.387941 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.388005 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.388019 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.388040 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.388054 4799 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-21T17:34:50Z","lastTransitionTime":"2026-01-21T17:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.445094 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg"] Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.445664 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.448639 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.448642 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.448902 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.448926 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.504707 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e3f40836-6b5c-4f62-8f03-2ba12e686bdb-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-zv6bg\" (UID: \"e3f40836-6b5c-4f62-8f03-2ba12e686bdb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.504778 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3f40836-6b5c-4f62-8f03-2ba12e686bdb-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-zv6bg\" (UID: \"e3f40836-6b5c-4f62-8f03-2ba12e686bdb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.504812 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: 
\"kubernetes.io/host-path/e3f40836-6b5c-4f62-8f03-2ba12e686bdb-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-zv6bg\" (UID: \"e3f40836-6b5c-4f62-8f03-2ba12e686bdb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.504918 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e3f40836-6b5c-4f62-8f03-2ba12e686bdb-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-zv6bg\" (UID: \"e3f40836-6b5c-4f62-8f03-2ba12e686bdb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.504949 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e3f40836-6b5c-4f62-8f03-2ba12e686bdb-service-ca\") pod \"cluster-version-operator-5c965bbfc6-zv6bg\" (UID: \"e3f40836-6b5c-4f62-8f03-2ba12e686bdb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.605602 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e3f40836-6b5c-4f62-8f03-2ba12e686bdb-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-zv6bg\" (UID: \"e3f40836-6b5c-4f62-8f03-2ba12e686bdb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.605660 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e3f40836-6b5c-4f62-8f03-2ba12e686bdb-service-ca\") pod \"cluster-version-operator-5c965bbfc6-zv6bg\" (UID: \"e3f40836-6b5c-4f62-8f03-2ba12e686bdb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.605755 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e3f40836-6b5c-4f62-8f03-2ba12e686bdb-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-zv6bg\" (UID: \"e3f40836-6b5c-4f62-8f03-2ba12e686bdb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.605822 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3f40836-6b5c-4f62-8f03-2ba12e686bdb-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-zv6bg\" (UID: \"e3f40836-6b5c-4f62-8f03-2ba12e686bdb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.605845 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e3f40836-6b5c-4f62-8f03-2ba12e686bdb-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-zv6bg\" (UID: \"e3f40836-6b5c-4f62-8f03-2ba12e686bdb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.605914 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e3f40836-6b5c-4f62-8f03-2ba12e686bdb-etc-cvo-updatepayloads\") pod 
\"cluster-version-operator-5c965bbfc6-zv6bg\" (UID: \"e3f40836-6b5c-4f62-8f03-2ba12e686bdb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.605874 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e3f40836-6b5c-4f62-8f03-2ba12e686bdb-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-zv6bg\" (UID: \"e3f40836-6b5c-4f62-8f03-2ba12e686bdb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.607737 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e3f40836-6b5c-4f62-8f03-2ba12e686bdb-service-ca\") pod \"cluster-version-operator-5c965bbfc6-zv6bg\" (UID: \"e3f40836-6b5c-4f62-8f03-2ba12e686bdb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.615599 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3f40836-6b5c-4f62-8f03-2ba12e686bdb-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-zv6bg\" (UID: \"e3f40836-6b5c-4f62-8f03-2ba12e686bdb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.626903 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e3f40836-6b5c-4f62-8f03-2ba12e686bdb-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-zv6bg\" (UID: \"e3f40836-6b5c-4f62-8f03-2ba12e686bdb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.765325 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" Jan 21 17:34:50 crc kubenswrapper[4799]: W0121 17:34:50.785715 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3f40836_6b5c_4f62_8f03_2ba12e686bdb.slice/crio-b32c60e67e0cdb176cacc76900eb4cf6984a952d43ed16ce8c9df29fdf47845d WatchSource:0}: Error finding container b32c60e67e0cdb176cacc76900eb4cf6984a952d43ed16ce8c9df29fdf47845d: Status 404 returned error can't find the container with id b32c60e67e0cdb176cacc76900eb4cf6984a952d43ed16ce8c9df29fdf47845d Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.938637 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 14:50:12.122097998 +0000 UTC Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.939140 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 21 17:34:50 crc kubenswrapper[4799]: I0121 17:34:50.947560 4799 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 21 17:34:51 crc kubenswrapper[4799]: I0121 17:34:51.204471 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:51 crc kubenswrapper[4799]: I0121 17:34:51.204538 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:51 crc kubenswrapper[4799]: E0121 17:34:51.204595 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:51 crc kubenswrapper[4799]: E0121 17:34:51.204692 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:51 crc kubenswrapper[4799]: I0121 17:34:51.204485 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:51 crc kubenswrapper[4799]: E0121 17:34:51.204778 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:51 crc kubenswrapper[4799]: I0121 17:34:51.204817 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:51 crc kubenswrapper[4799]: E0121 17:34:51.204869 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:51 crc kubenswrapper[4799]: I0121 17:34:51.750552 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" event={"ID":"e3f40836-6b5c-4f62-8f03-2ba12e686bdb","Type":"ContainerStarted","Data":"ead48f73d3f0642670fd95d8f075c246da844bb8369b2a87ab2657c6b747604e"} Jan 21 17:34:51 crc kubenswrapper[4799]: I0121 17:34:51.750649 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" event={"ID":"e3f40836-6b5c-4f62-8f03-2ba12e686bdb","Type":"ContainerStarted","Data":"b32c60e67e0cdb176cacc76900eb4cf6984a952d43ed16ce8c9df29fdf47845d"} Jan 21 17:34:51 crc kubenswrapper[4799]: I0121 17:34:51.768143 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zv6bg" podStartSLOduration=90.768100143 podStartE2EDuration="1m30.768100143s" podCreationTimestamp="2026-01-21 17:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:34:51.767775924 +0000 UTC m=+118.394065947" watchObservedRunningTime="2026-01-21 17:34:51.768100143 +0000 UTC m=+118.394390166" Jan 21 17:34:52 crc kubenswrapper[4799]: I0121 17:34:52.205801 4799 scope.go:117] "RemoveContainer" containerID="4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258" Jan 21 17:34:52 crc kubenswrapper[4799]: E0121 17:34:52.206057 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6qqjg_openshift-ovn-kubernetes(6770819e-2fef-4203-9c5f-504628af7b66)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" Jan 21 17:34:53 crc kubenswrapper[4799]: I0121 17:34:53.204630 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:53 crc kubenswrapper[4799]: I0121 17:34:53.204717 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:53 crc kubenswrapper[4799]: I0121 17:34:53.204717 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:53 crc kubenswrapper[4799]: E0121 17:34:53.205465 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:53 crc kubenswrapper[4799]: I0121 17:34:53.205776 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:53 crc kubenswrapper[4799]: E0121 17:34:53.205853 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:53 crc kubenswrapper[4799]: E0121 17:34:53.206693 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:53 crc kubenswrapper[4799]: E0121 17:34:53.206851 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:54 crc kubenswrapper[4799]: E0121 17:34:54.083816 4799 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 21 17:34:54 crc kubenswrapper[4799]: E0121 17:34:54.543801 4799 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 21 17:34:55 crc kubenswrapper[4799]: I0121 17:34:55.205121 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:55 crc kubenswrapper[4799]: I0121 17:34:55.205204 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:55 crc kubenswrapper[4799]: I0121 17:34:55.205211 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:55 crc kubenswrapper[4799]: I0121 17:34:55.205495 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:55 crc kubenswrapper[4799]: E0121 17:34:55.205554 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:55 crc kubenswrapper[4799]: E0121 17:34:55.205675 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:55 crc kubenswrapper[4799]: E0121 17:34:55.205727 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:55 crc kubenswrapper[4799]: E0121 17:34:55.205808 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:57 crc kubenswrapper[4799]: I0121 17:34:57.204289 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:57 crc kubenswrapper[4799]: I0121 17:34:57.204345 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:57 crc kubenswrapper[4799]: I0121 17:34:57.204368 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:57 crc kubenswrapper[4799]: I0121 17:34:57.204406 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:57 crc kubenswrapper[4799]: E0121 17:34:57.204476 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:57 crc kubenswrapper[4799]: E0121 17:34:57.204575 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:57 crc kubenswrapper[4799]: E0121 17:34:57.204659 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:57 crc kubenswrapper[4799]: E0121 17:34:57.204661 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:59 crc kubenswrapper[4799]: I0121 17:34:59.204158 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:34:59 crc kubenswrapper[4799]: I0121 17:34:59.204201 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:34:59 crc kubenswrapper[4799]: I0121 17:34:59.204335 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:34:59 crc kubenswrapper[4799]: E0121 17:34:59.204363 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:34:59 crc kubenswrapper[4799]: I0121 17:34:59.204463 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:34:59 crc kubenswrapper[4799]: E0121 17:34:59.204522 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:34:59 crc kubenswrapper[4799]: E0121 17:34:59.204691 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:34:59 crc kubenswrapper[4799]: E0121 17:34:59.204768 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:34:59 crc kubenswrapper[4799]: E0121 17:34:59.545763 4799 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 21 17:35:01 crc kubenswrapper[4799]: I0121 17:35:01.205102 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:35:01 crc kubenswrapper[4799]: I0121 17:35:01.205229 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:35:01 crc kubenswrapper[4799]: I0121 17:35:01.205287 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:01 crc kubenswrapper[4799]: E0121 17:35:01.205318 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:35:01 crc kubenswrapper[4799]: I0121 17:35:01.205243 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:01 crc kubenswrapper[4799]: E0121 17:35:01.205524 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:35:01 crc kubenswrapper[4799]: E0121 17:35:01.205592 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:35:01 crc kubenswrapper[4799]: E0121 17:35:01.205655 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:35:02 crc kubenswrapper[4799]: I0121 17:35:02.792300 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-sl7lv_3004f2e1-bd6a-46a1-a6d9-835472f616b8/kube-multus/1.log" Jan 21 17:35:02 crc kubenswrapper[4799]: I0121 17:35:02.794063 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-sl7lv_3004f2e1-bd6a-46a1-a6d9-835472f616b8/kube-multus/0.log" Jan 21 17:35:02 crc kubenswrapper[4799]: I0121 17:35:02.794159 4799 generic.go:334] "Generic (PLEG): container finished" podID="3004f2e1-bd6a-46a1-a6d9-835472f616b8" containerID="cc4bd2b4d337b25c3a57212bc50968e1ce7cfe716f539f873a37c9adde85dc4f" exitCode=1 Jan 21 17:35:02 crc kubenswrapper[4799]: I0121 17:35:02.794209 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-sl7lv" event={"ID":"3004f2e1-bd6a-46a1-a6d9-835472f616b8","Type":"ContainerDied","Data":"cc4bd2b4d337b25c3a57212bc50968e1ce7cfe716f539f873a37c9adde85dc4f"} Jan 21 17:35:02 crc kubenswrapper[4799]: I0121 17:35:02.794258 4799 scope.go:117] "RemoveContainer" containerID="009a54ff4e14bf02ef59eb6a2c236fa43f574bbb5c1a8952ad62915e23be2a61" Jan 21 17:35:02 crc kubenswrapper[4799]: I0121 17:35:02.794870 4799 scope.go:117] "RemoveContainer" containerID="cc4bd2b4d337b25c3a57212bc50968e1ce7cfe716f539f873a37c9adde85dc4f" Jan 21 17:35:02 crc kubenswrapper[4799]: E0121 17:35:02.795043 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-sl7lv_openshift-multus(3004f2e1-bd6a-46a1-a6d9-835472f616b8)\"" pod="openshift-multus/multus-sl7lv" podUID="3004f2e1-bd6a-46a1-a6d9-835472f616b8" Jan 21 17:35:03 crc kubenswrapper[4799]: I0121 17:35:03.204568 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:03 crc kubenswrapper[4799]: I0121 17:35:03.204597 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:35:03 crc kubenswrapper[4799]: E0121 17:35:03.205303 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:35:03 crc kubenswrapper[4799]: I0121 17:35:03.204644 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:35:03 crc kubenswrapper[4799]: I0121 17:35:03.204617 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:03 crc kubenswrapper[4799]: E0121 17:35:03.205437 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:35:03 crc kubenswrapper[4799]: E0121 17:35:03.205537 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:35:03 crc kubenswrapper[4799]: E0121 17:35:03.205649 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:35:03 crc kubenswrapper[4799]: I0121 17:35:03.801973 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-sl7lv_3004f2e1-bd6a-46a1-a6d9-835472f616b8/kube-multus/1.log" Jan 21 17:35:04 crc kubenswrapper[4799]: E0121 17:35:04.546791 4799 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 21 17:35:05 crc kubenswrapper[4799]: I0121 17:35:05.204098 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:05 crc kubenswrapper[4799]: I0121 17:35:05.204238 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:35:05 crc kubenswrapper[4799]: E0121 17:35:05.204395 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:35:05 crc kubenswrapper[4799]: I0121 17:35:05.204679 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:35:05 crc kubenswrapper[4799]: I0121 17:35:05.204714 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:05 crc kubenswrapper[4799]: E0121 17:35:05.204778 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:35:05 crc kubenswrapper[4799]: E0121 17:35:05.204920 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:35:05 crc kubenswrapper[4799]: E0121 17:35:05.205007 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:35:07 crc kubenswrapper[4799]: I0121 17:35:07.204569 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:35:07 crc kubenswrapper[4799]: I0121 17:35:07.204725 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:07 crc kubenswrapper[4799]: E0121 17:35:07.204811 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:35:07 crc kubenswrapper[4799]: I0121 17:35:07.204843 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:35:07 crc kubenswrapper[4799]: I0121 17:35:07.204876 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:07 crc kubenswrapper[4799]: E0121 17:35:07.205243 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:35:07 crc kubenswrapper[4799]: E0121 17:35:07.205373 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:35:07 crc kubenswrapper[4799]: E0121 17:35:07.205892 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:35:07 crc kubenswrapper[4799]: I0121 17:35:07.206149 4799 scope.go:117] "RemoveContainer" containerID="4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258" Jan 21 17:35:07 crc kubenswrapper[4799]: I0121 17:35:07.823934 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovnkube-controller/3.log" Jan 21 17:35:07 crc kubenswrapper[4799]: I0121 17:35:07.827927 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerStarted","Data":"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac"} Jan 21 17:35:07 crc kubenswrapper[4799]: I0121 17:35:07.828510 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:35:07 crc kubenswrapper[4799]: I0121 17:35:07.869488 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podStartSLOduration=105.869465344 podStartE2EDuration="1m45.869465344s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:07.869105933 +0000 UTC m=+134.495395976" watchObservedRunningTime="2026-01-21 17:35:07.869465344 +0000 UTC m=+134.495755367" Jan 21 17:35:08 crc kubenswrapper[4799]: I0121 17:35:08.697971 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-7q999"] Jan 21 17:35:08 crc kubenswrapper[4799]: I0121 17:35:08.698138 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:35:08 crc kubenswrapper[4799]: E0121 17:35:08.698238 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:35:09 crc kubenswrapper[4799]: I0121 17:35:09.204691 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:35:09 crc kubenswrapper[4799]: E0121 17:35:09.205367 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:35:09 crc kubenswrapper[4799]: I0121 17:35:09.204753 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:09 crc kubenswrapper[4799]: E0121 17:35:09.205468 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:35:09 crc kubenswrapper[4799]: I0121 17:35:09.204691 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:09 crc kubenswrapper[4799]: E0121 17:35:09.205856 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:35:09 crc kubenswrapper[4799]: E0121 17:35:09.548615 4799 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 21 17:35:10 crc kubenswrapper[4799]: I0121 17:35:10.204925 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:35:10 crc kubenswrapper[4799]: E0121 17:35:10.205167 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:35:11 crc kubenswrapper[4799]: I0121 17:35:11.204072 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:11 crc kubenswrapper[4799]: I0121 17:35:11.204207 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:35:11 crc kubenswrapper[4799]: E0121 17:35:11.204245 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:35:11 crc kubenswrapper[4799]: I0121 17:35:11.204313 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:11 crc kubenswrapper[4799]: E0121 17:35:11.204416 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:35:11 crc kubenswrapper[4799]: E0121 17:35:11.204535 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:35:12 crc kubenswrapper[4799]: I0121 17:35:12.205158 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:35:12 crc kubenswrapper[4799]: E0121 17:35:12.205380 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:35:13 crc kubenswrapper[4799]: I0121 17:35:13.204720 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:13 crc kubenswrapper[4799]: I0121 17:35:13.204792 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:13 crc kubenswrapper[4799]: I0121 17:35:13.204720 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:35:13 crc kubenswrapper[4799]: E0121 17:35:13.204931 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:35:13 crc kubenswrapper[4799]: E0121 17:35:13.205076 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:35:13 crc kubenswrapper[4799]: E0121 17:35:13.205326 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:35:14 crc kubenswrapper[4799]: I0121 17:35:14.204782 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:35:14 crc kubenswrapper[4799]: E0121 17:35:14.205919 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:35:14 crc kubenswrapper[4799]: E0121 17:35:14.549541 4799 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 21 17:35:15 crc kubenswrapper[4799]: I0121 17:35:15.204691 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:15 crc kubenswrapper[4799]: I0121 17:35:15.204796 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:15 crc kubenswrapper[4799]: I0121 17:35:15.204700 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:35:15 crc kubenswrapper[4799]: E0121 17:35:15.204934 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:35:15 crc kubenswrapper[4799]: E0121 17:35:15.205074 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:35:15 crc kubenswrapper[4799]: E0121 17:35:15.205253 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:35:16 crc kubenswrapper[4799]: I0121 17:35:16.204807 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:35:16 crc kubenswrapper[4799]: E0121 17:35:16.204997 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:35:17 crc kubenswrapper[4799]: I0121 17:35:17.204649 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:35:17 crc kubenswrapper[4799]: I0121 17:35:17.204738 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:17 crc kubenswrapper[4799]: I0121 17:35:17.204757 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:17 crc kubenswrapper[4799]: E0121 17:35:17.204862 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:35:17 crc kubenswrapper[4799]: E0121 17:35:17.205000 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:35:17 crc kubenswrapper[4799]: E0121 17:35:17.205235 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:35:18 crc kubenswrapper[4799]: I0121 17:35:18.204857 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:35:18 crc kubenswrapper[4799]: E0121 17:35:18.205089 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:35:18 crc kubenswrapper[4799]: I0121 17:35:18.205365 4799 scope.go:117] "RemoveContainer" containerID="cc4bd2b4d337b25c3a57212bc50968e1ce7cfe716f539f873a37c9adde85dc4f" Jan 21 17:35:18 crc kubenswrapper[4799]: I0121 17:35:18.875958 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-sl7lv_3004f2e1-bd6a-46a1-a6d9-835472f616b8/kube-multus/1.log" Jan 21 17:35:18 crc kubenswrapper[4799]: I0121 17:35:18.876493 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-sl7lv" event={"ID":"3004f2e1-bd6a-46a1-a6d9-835472f616b8","Type":"ContainerStarted","Data":"6c3bc39cc19c866dd40455a982701ba69abfab4ec1850efa28878c9028541555"} Jan 21 17:35:19 crc kubenswrapper[4799]: I0121 17:35:19.204600 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:19 crc kubenswrapper[4799]: I0121 17:35:19.204696 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:35:19 crc kubenswrapper[4799]: I0121 17:35:19.204598 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:19 crc kubenswrapper[4799]: E0121 17:35:19.204828 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:35:19 crc kubenswrapper[4799]: E0121 17:35:19.204952 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:35:19 crc kubenswrapper[4799]: E0121 17:35:19.205048 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:35:19 crc kubenswrapper[4799]: E0121 17:35:19.552059 4799 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 21 17:35:20 crc kubenswrapper[4799]: I0121 17:35:20.204437 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:35:20 crc kubenswrapper[4799]: E0121 17:35:20.204669 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:35:21 crc kubenswrapper[4799]: I0121 17:35:21.204400 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:21 crc kubenswrapper[4799]: I0121 17:35:21.204438 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:35:21 crc kubenswrapper[4799]: E0121 17:35:21.204671 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:35:21 crc kubenswrapper[4799]: I0121 17:35:21.204436 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:21 crc kubenswrapper[4799]: E0121 17:35:21.204931 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:35:21 crc kubenswrapper[4799]: E0121 17:35:21.205105 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:35:22 crc kubenswrapper[4799]: I0121 17:35:22.204959 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:35:22 crc kubenswrapper[4799]: E0121 17:35:22.205222 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:35:23 crc kubenswrapper[4799]: I0121 17:35:23.204406 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:23 crc kubenswrapper[4799]: I0121 17:35:23.204519 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:35:23 crc kubenswrapper[4799]: E0121 17:35:23.204567 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 21 17:35:23 crc kubenswrapper[4799]: E0121 17:35:23.204741 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 21 17:35:23 crc kubenswrapper[4799]: I0121 17:35:23.204822 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:23 crc kubenswrapper[4799]: E0121 17:35:23.204906 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 21 17:35:24 crc kubenswrapper[4799]: I0121 17:35:24.208001 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:35:24 crc kubenswrapper[4799]: E0121 17:35:24.208225 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7q999" podUID="7796adba-b973-44ee-b0c4-c0df544250e3" Jan 21 17:35:25 crc kubenswrapper[4799]: I0121 17:35:25.204434 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:25 crc kubenswrapper[4799]: I0121 17:35:25.204485 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:25 crc kubenswrapper[4799]: I0121 17:35:25.204501 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:35:25 crc kubenswrapper[4799]: I0121 17:35:25.208286 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 21 17:35:25 crc kubenswrapper[4799]: I0121 17:35:25.209253 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 21 17:35:25 crc kubenswrapper[4799]: I0121 17:35:25.209393 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 21 17:35:25 crc kubenswrapper[4799]: I0121 17:35:25.210477 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 21 17:35:26 crc kubenswrapper[4799]: I0121 17:35:26.042097 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:35:26 crc kubenswrapper[4799]: I0121 17:35:26.204785 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:35:26 crc kubenswrapper[4799]: I0121 17:35:26.206819 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 21 17:35:26 crc kubenswrapper[4799]: I0121 17:35:26.208039 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.497567 4799 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.555402 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-w2n7v"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.556121 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.561191 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-rk6k5"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.561880 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wr5c4"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.562080 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.562457 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.562990 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.563340 4799 util.go:30] "No sandbox for pod can be found. 
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.566672 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.566772 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.566898 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.566891 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.567007 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.567461 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.568088 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.568212 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.568238 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.568216 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.568969 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-mfdx4"]
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.569562 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.569634 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.569837 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.569928 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24jlh"]
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.570344 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24jlh"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.572566 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r"]
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.573268 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d"]
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.573419 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.573632 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb"]
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.574007 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.574401 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.577348 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt"]
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.578526 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.582892 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.583207 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.589524 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.590064 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.590433 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.590609 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.591050 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.592534 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.596337 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.596368 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.596794 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.597368 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.597583 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.597707 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.598383 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.598812 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.598881 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.599083 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.599190 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.599269 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.599366 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.599460 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.599535 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.599622 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.599685 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.599773 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.599782 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4"]
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.599896 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.600040 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.600164 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.600291 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.600350 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.600417 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-m875t"]
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.600550 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.600601 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.600654 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.600823 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.600832 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-m875t"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.600834 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.600917 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.600945 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.601021 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.601068 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.601105 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.601258 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.601313 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.601360 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.600878 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.601264 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.600903 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.601833 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-6cjlt"]
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.619816 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-6cjlt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.621870 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38e1abaa-9da0-4924-a6b5-ee9617cf304d-config\") pod \"machine-api-operator-5694c8668f-rk6k5\" (UID: \"38e1abaa-9da0-4924-a6b5-ee9617cf304d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.621918 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kp8cd\" (UniqueName: \"kubernetes.io/projected/9f4ff0b4-f3da-4cab-a054-970565d09713-kube-api-access-kp8cd\") pod \"kube-storage-version-migrator-operator-b67b599dd-s96kb\" (UID: \"9f4ff0b4-f3da-4cab-a054-970565d09713\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.621957 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/14f1d8ff-d287-44f6-8427-2cc844cab8d1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-24jlh\" (UID: \"14f1d8ff-d287-44f6-8427-2cc844cab8d1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24jlh"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.621991 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/b0eabe68-5cf0-4bc2-8578-02f73622072b-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-ftnlt\" (UID: \"b0eabe68-5cf0-4bc2-8578-02f73622072b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.622026 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f4ff0b4-f3da-4cab-a054-970565d09713-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-s96kb\" (UID: \"9f4ff0b4-f3da-4cab-a054-970565d09713\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.622062 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/bb70641c-df77-4b31-bc8d-d996213797cc-machine-approver-tls\") pod \"machine-approver-56656f9798-bwn6r\" (UID: \"bb70641c-df77-4b31-bc8d-d996213797cc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.622089 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a6e3985-ef4a-451a-90cf-4b313527298c-serving-cert\") pod \"route-controller-manager-6576b87f9c-59kzj\" (UID: \"9a6e3985-ef4a-451a-90cf-4b313527298c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.622177 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a6e3985-ef4a-451a-90cf-4b313527298c-config\") pod \"route-controller-manager-6576b87f9c-59kzj\" (UID: \"9a6e3985-ef4a-451a-90cf-4b313527298c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.622208 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2j2hm\" (UniqueName: \"kubernetes.io/projected/38e1abaa-9da0-4924-a6b5-ee9617cf304d-kube-api-access-2j2hm\") pod \"machine-api-operator-5694c8668f-rk6k5\" (UID: \"38e1abaa-9da0-4924-a6b5-ee9617cf304d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.622238 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vhh9\" (UniqueName: \"kubernetes.io/projected/b0eabe68-5cf0-4bc2-8578-02f73622072b-kube-api-access-8vhh9\") pod \"package-server-manager-789f6589d5-ftnlt\" (UID: \"b0eabe68-5cf0-4bc2-8578-02f73622072b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.622268 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f4ff0b4-f3da-4cab-a054-970565d09713-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-s96kb\" (UID: \"9f4ff0b4-f3da-4cab-a054-970565d09713\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.622295 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bb70641c-df77-4b31-bc8d-d996213797cc-auth-proxy-config\") pod \"machine-approver-56656f9798-bwn6r\" (UID: \"bb70641c-df77-4b31-bc8d-d996213797cc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.622336 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb70641c-df77-4b31-bc8d-d996213797cc-config\") pod \"machine-approver-56656f9798-bwn6r\" (UID: \"bb70641c-df77-4b31-bc8d-d996213797cc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.622366 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9a6e3985-ef4a-451a-90cf-4b313527298c-client-ca\") pod \"route-controller-manager-6576b87f9c-59kzj\" (UID: \"9a6e3985-ef4a-451a-90cf-4b313527298c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.622408 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/38e1abaa-9da0-4924-a6b5-ee9617cf304d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-rk6k5\" (UID: \"38e1abaa-9da0-4924-a6b5-ee9617cf304d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.622438 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7m27v\" (UniqueName: \"kubernetes.io/projected/9a6e3985-ef4a-451a-90cf-4b313527298c-kube-api-access-7m27v\") pod \"route-controller-manager-6576b87f9c-59kzj\" (UID: \"9a6e3985-ef4a-451a-90cf-4b313527298c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.622713 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/38e1abaa-9da0-4924-a6b5-ee9617cf304d-images\") pod \"machine-api-operator-5694c8668f-rk6k5\" (UID: \"38e1abaa-9da0-4924-a6b5-ee9617cf304d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.623007 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2qm6\" (UniqueName: \"kubernetes.io/projected/14f1d8ff-d287-44f6-8427-2cc844cab8d1-kube-api-access-k2qm6\") pod \"cluster-samples-operator-665b6dd947-24jlh\" (UID: \"14f1d8ff-d287-44f6-8427-2cc844cab8d1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24jlh"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.623198 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pl99\" (UniqueName: \"kubernetes.io/projected/bb70641c-df77-4b31-bc8d-d996213797cc-kube-api-access-5pl99\") pod \"machine-approver-56656f9798-bwn6r\" (UID: \"bb70641c-df77-4b31-bc8d-d996213797cc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r"
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.623395 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nzghk"]
pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nzghk"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.644039 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.644506 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.644718 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.644953 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nzghk" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.646378 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.647612 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.647839 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.651277 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mr95r"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.654119 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qs4j8"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.654661 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.654903 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-mr95r" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.655041 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qs4j8" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.655659 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.659345 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.659809 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.662362 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.667610 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.668182 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.668233 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.668402 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.668402 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.668525 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.668589 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.668798 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.669016 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.669331 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.669420 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.671231 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.671411 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.671671 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.671865 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.671989 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.672106 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.672184 4799 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.672249 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.672737 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.672855 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.672955 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.673186 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.674885 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.675031 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-chqwl"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.675233 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.675442 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.675558 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.675622 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wqt42"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.675639 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.675892 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-chqwl" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.675959 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.676011 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.676560 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.676706 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.676845 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.676966 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.677108 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.677245 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.676451 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.677538 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.677615 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.678260 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.678734 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.679037 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.679059 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.687949 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.689626 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.690598 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-96sxw"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.691269 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2wknp"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.691310 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.691779 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q5ndp"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.692143 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.692196 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.692312 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.692408 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.692195 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-l8wnp"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.698448 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-l8wnp" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.702440 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.717274 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.719636 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.746763 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.746788 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.747423 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.747924 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.748380 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.749095 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.750651 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-46bpg"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.751211 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.751500 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.752121 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.752237 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.752573 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.752346 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-46bpg" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.752346 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e45b169d-862b-4326-a005-063cead60ac4-serving-cert\") pod \"controller-manager-879f6c89f-wr5c4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.753497 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2j2hm\" (UniqueName: \"kubernetes.io/projected/38e1abaa-9da0-4924-a6b5-ee9617cf304d-kube-api-access-2j2hm\") pod \"machine-api-operator-5694c8668f-rk6k5\" (UID: \"38e1abaa-9da0-4924-a6b5-ee9617cf304d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.753531 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vhh9\" (UniqueName: \"kubernetes.io/projected/b0eabe68-5cf0-4bc2-8578-02f73622072b-kube-api-access-8vhh9\") pod \"package-server-manager-789f6589d5-ftnlt\" (UID: \"b0eabe68-5cf0-4bc2-8578-02f73622072b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.753560 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-config\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.753598 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8splh\" (UniqueName: 
\"kubernetes.io/projected/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-kube-api-access-8splh\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.753639 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-config\") pod \"controller-manager-879f6c89f-wr5c4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.753676 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-image-import-ca\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.753706 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f4ff0b4-f3da-4cab-a054-970565d09713-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-s96kb\" (UID: \"9f4ff0b4-f3da-4cab-a054-970565d09713\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.753743 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-etcd-client\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.753809 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bb70641c-df77-4b31-bc8d-d996213797cc-auth-proxy-config\") pod \"machine-approver-56656f9798-bwn6r\" (UID: \"bb70641c-df77-4b31-bc8d-d996213797cc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.753840 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb70641c-df77-4b31-bc8d-d996213797cc-config\") pod \"machine-approver-56656f9798-bwn6r\" (UID: \"bb70641c-df77-4b31-bc8d-d996213797cc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.753865 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9a6e3985-ef4a-451a-90cf-4b313527298c-client-ca\") pod \"route-controller-manager-6576b87f9c-59kzj\" (UID: \"9a6e3985-ef4a-451a-90cf-4b313527298c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.753891 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-client-ca\") pod \"controller-manager-879f6c89f-wr5c4\" (UID: 
\"e45b169d-862b-4326-a005-063cead60ac4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.753924 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vw5m\" (UniqueName: \"kubernetes.io/projected/8ee870b9-12a2-466c-a4a2-697c9d8c9918-kube-api-access-8vw5m\") pod \"openshift-config-operator-7777fb866f-vfk7d\" (UID: \"8ee870b9-12a2-466c-a4a2-697c9d8c9918\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.753979 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/38e1abaa-9da0-4924-a6b5-ee9617cf304d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-rk6k5\" (UID: \"38e1abaa-9da0-4924-a6b5-ee9617cf304d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754006 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7m27v\" (UniqueName: \"kubernetes.io/projected/9a6e3985-ef4a-451a-90cf-4b313527298c-kube-api-access-7m27v\") pod \"route-controller-manager-6576b87f9c-59kzj\" (UID: \"9a6e3985-ef4a-451a-90cf-4b313527298c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754031 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ee870b9-12a2-466c-a4a2-697c9d8c9918-serving-cert\") pod \"openshift-config-operator-7777fb866f-vfk7d\" (UID: \"8ee870b9-12a2-466c-a4a2-697c9d8c9918\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754081 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/38e1abaa-9da0-4924-a6b5-ee9617cf304d-images\") pod \"machine-api-operator-5694c8668f-rk6k5\" (UID: \"38e1abaa-9da0-4924-a6b5-ee9617cf304d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754111 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2qm6\" (UniqueName: \"kubernetes.io/projected/14f1d8ff-d287-44f6-8427-2cc844cab8d1-kube-api-access-k2qm6\") pod \"cluster-samples-operator-665b6dd947-24jlh\" (UID: \"14f1d8ff-d287-44f6-8427-2cc844cab8d1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24jlh" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754164 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/118f3348-2c20-44d1-96dc-03b2aa49cea2-service-ca-bundle\") pod \"authentication-operator-69f744f599-mfdx4\" (UID: \"118f3348-2c20-44d1-96dc-03b2aa49cea2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754202 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pl99\" (UniqueName: \"kubernetes.io/projected/bb70641c-df77-4b31-bc8d-d996213797cc-kube-api-access-5pl99\") pod \"machine-approver-56656f9798-bwn6r\" (UID: 
\"bb70641c-df77-4b31-bc8d-d996213797cc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754241 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-wr5c4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754270 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/8ee870b9-12a2-466c-a4a2-697c9d8c9918-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vfk7d\" (UID: \"8ee870b9-12a2-466c-a4a2-697c9d8c9918\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754302 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8k4l\" (UniqueName: \"kubernetes.io/projected/e45b169d-862b-4326-a005-063cead60ac4-kube-api-access-q8k4l\") pod \"controller-manager-879f6c89f-wr5c4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754328 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-serving-cert\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754353 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/118f3348-2c20-44d1-96dc-03b2aa49cea2-config\") pod \"authentication-operator-69f744f599-mfdx4\" (UID: \"118f3348-2c20-44d1-96dc-03b2aa49cea2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754399 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38e1abaa-9da0-4924-a6b5-ee9617cf304d-config\") pod \"machine-api-operator-5694c8668f-rk6k5\" (UID: \"38e1abaa-9da0-4924-a6b5-ee9617cf304d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754429 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-audit-dir\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754459 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kp8cd\" (UniqueName: \"kubernetes.io/projected/9f4ff0b4-f3da-4cab-a054-970565d09713-kube-api-access-kp8cd\") pod \"kube-storage-version-migrator-operator-b67b599dd-s96kb\" (UID: \"9f4ff0b4-f3da-4cab-a054-970565d09713\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754484 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-node-pullsecrets\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754510 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/118f3348-2c20-44d1-96dc-03b2aa49cea2-serving-cert\") pod \"authentication-operator-69f744f599-mfdx4\" (UID: \"118f3348-2c20-44d1-96dc-03b2aa49cea2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754540 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/14f1d8ff-d287-44f6-8427-2cc844cab8d1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-24jlh\" (UID: \"14f1d8ff-d287-44f6-8427-2cc844cab8d1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24jlh" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754575 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/b0eabe68-5cf0-4bc2-8578-02f73622072b-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-ftnlt\" (UID: \"b0eabe68-5cf0-4bc2-8578-02f73622072b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754613 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/118f3348-2c20-44d1-96dc-03b2aa49cea2-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-mfdx4\" (UID: \"118f3348-2c20-44d1-96dc-03b2aa49cea2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754648 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f4ff0b4-f3da-4cab-a054-970565d09713-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-s96kb\" (UID: \"9f4ff0b4-f3da-4cab-a054-970565d09713\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754674 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/bb70641c-df77-4b31-bc8d-d996213797cc-machine-approver-tls\") pod \"machine-approver-56656f9798-bwn6r\" (UID: \"bb70641c-df77-4b31-bc8d-d996213797cc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754707 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a6e3985-ef4a-451a-90cf-4b313527298c-serving-cert\") pod 
\"route-controller-manager-6576b87f9c-59kzj\" (UID: \"9a6e3985-ef4a-451a-90cf-4b313527298c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754746 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-etcd-serving-ca\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754788 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-audit\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754825 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a6e3985-ef4a-451a-90cf-4b313527298c-config\") pod \"route-controller-manager-6576b87f9c-59kzj\" (UID: \"9a6e3985-ef4a-451a-90cf-4b313527298c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754853 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-trusted-ca-bundle\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754891 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-encryption-config\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754916 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpkdc\" (UniqueName: \"kubernetes.io/projected/118f3348-2c20-44d1-96dc-03b2aa49cea2-kube-api-access-mpkdc\") pod \"authentication-operator-69f744f599-mfdx4\" (UID: \"118f3348-2c20-44d1-96dc-03b2aa49cea2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754923 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bb70641c-df77-4b31-bc8d-d996213797cc-auth-proxy-config\") pod \"machine-approver-56656f9798-bwn6r\" (UID: \"bb70641c-df77-4b31-bc8d-d996213797cc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.754952 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb70641c-df77-4b31-bc8d-d996213797cc-config\") pod \"machine-approver-56656f9798-bwn6r\" (UID: \"bb70641c-df77-4b31-bc8d-d996213797cc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r" Jan 21 
17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.757470 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/38e1abaa-9da0-4924-a6b5-ee9617cf304d-images\") pod \"machine-api-operator-5694c8668f-rk6k5\" (UID: \"38e1abaa-9da0-4924-a6b5-ee9617cf304d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.759517 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a6e3985-ef4a-451a-90cf-4b313527298c-config\") pod \"route-controller-manager-6576b87f9c-59kzj\" (UID: \"9a6e3985-ef4a-451a-90cf-4b313527298c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.759645 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9a6e3985-ef4a-451a-90cf-4b313527298c-client-ca\") pod \"route-controller-manager-6576b87f9c-59kzj\" (UID: \"9a6e3985-ef4a-451a-90cf-4b313527298c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.760073 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f4ff0b4-f3da-4cab-a054-970565d09713-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-s96kb\" (UID: \"9f4ff0b4-f3da-4cab-a054-970565d09713\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.760947 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.762835 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.764309 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-q6bfd"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.764289 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38e1abaa-9da0-4924-a6b5-ee9617cf304d-config\") pod \"machine-api-operator-5694c8668f-rk6k5\" (UID: \"38e1abaa-9da0-4924-a6b5-ee9617cf304d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.768957 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-5mmz6"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.770006 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-rk6k5"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.770042 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24jlh"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.770059 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-mfdx4"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.770322 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-5mmz6" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.770703 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.772235 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/bb70641c-df77-4b31-bc8d-d996213797cc-machine-approver-tls\") pod \"machine-approver-56656f9798-bwn6r\" (UID: \"bb70641c-df77-4b31-bc8d-d996213797cc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.772875 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a6e3985-ef4a-451a-90cf-4b313527298c-serving-cert\") pod \"route-controller-manager-6576b87f9c-59kzj\" (UID: \"9a6e3985-ef4a-451a-90cf-4b313527298c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.773400 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.774230 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/38e1abaa-9da0-4924-a6b5-ee9617cf304d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-rk6k5\" (UID: \"38e1abaa-9da0-4924-a6b5-ee9617cf304d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.774633 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/9f4ff0b4-f3da-4cab-a054-970565d09713-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-s96kb\" (UID: \"9f4ff0b4-f3da-4cab-a054-970565d09713\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.778576 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/b0eabe68-5cf0-4bc2-8578-02f73622072b-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-ftnlt\" (UID: \"b0eabe68-5cf0-4bc2-8578-02f73622072b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.778860 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.780888 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.781092 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-t47vq"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.783631 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qs4j8"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.784091 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-t47vq" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.788607 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/14f1d8ff-d287-44f6-8427-2cc844cab8d1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-24jlh\" (UID: \"14f1d8ff-d287-44f6-8427-2cc844cab8d1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24jlh" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.792994 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.800300 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.802330 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.808314 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.809188 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.814349 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nzghk"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.815873 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-m875t"] Jan 21 17:35:31 crc kubenswrapper[4799]: 
I0121 17:35:31.817740 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.819903 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.820086 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.820366 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-l8wnp"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.823818 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wr5c4"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.824313 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-6cjlt"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.826928 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.828272 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wqt42"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.835602 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.835659 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.837544 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.840139 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.840184 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-96sxw"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.841352 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-w2n7v"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.842736 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mr95r"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.850568 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.850678 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.851863 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-sm66d"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.852659 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-sm66d" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.855739 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-config\") pod \"controller-manager-879f6c89f-wr5c4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.855775 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-image-import-ca\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.855802 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-etcd-client\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.855820 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-client-ca\") pod \"controller-manager-879f6c89f-wr5c4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.855861 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vw5m\" (UniqueName: \"kubernetes.io/projected/8ee870b9-12a2-466c-a4a2-697c9d8c9918-kube-api-access-8vw5m\") pod \"openshift-config-operator-7777fb866f-vfk7d\" (UID: \"8ee870b9-12a2-466c-a4a2-697c9d8c9918\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.855905 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ee870b9-12a2-466c-a4a2-697c9d8c9918-serving-cert\") pod \"openshift-config-operator-7777fb866f-vfk7d\" (UID: \"8ee870b9-12a2-466c-a4a2-697c9d8c9918\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.855935 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/118f3348-2c20-44d1-96dc-03b2aa49cea2-service-ca-bundle\") pod \"authentication-operator-69f744f599-mfdx4\" (UID: \"118f3348-2c20-44d1-96dc-03b2aa49cea2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.855965 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-wr5c4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.855998 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/8ee870b9-12a2-466c-a4a2-697c9d8c9918-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vfk7d\" (UID: \"8ee870b9-12a2-466c-a4a2-697c9d8c9918\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.856021 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8k4l\" (UniqueName: \"kubernetes.io/projected/e45b169d-862b-4326-a005-063cead60ac4-kube-api-access-q8k4l\") pod \"controller-manager-879f6c89f-wr5c4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.856043 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-serving-cert\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.856101 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/118f3348-2c20-44d1-96dc-03b2aa49cea2-config\") pod \"authentication-operator-69f744f599-mfdx4\" (UID: \"118f3348-2c20-44d1-96dc-03b2aa49cea2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.856176 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-audit-dir\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.856200 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-node-pullsecrets\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.856230 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/118f3348-2c20-44d1-96dc-03b2aa49cea2-serving-cert\") pod \"authentication-operator-69f744f599-mfdx4\" (UID: \"118f3348-2c20-44d1-96dc-03b2aa49cea2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.856259 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/118f3348-2c20-44d1-96dc-03b2aa49cea2-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-mfdx4\" (UID: \"118f3348-2c20-44d1-96dc-03b2aa49cea2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.856294 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-etcd-serving-ca\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " 
pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.856333 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-audit\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.856370 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-trusted-ca-bundle\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.856403 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-encryption-config\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.856423 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpkdc\" (UniqueName: \"kubernetes.io/projected/118f3348-2c20-44d1-96dc-03b2aa49cea2-kube-api-access-mpkdc\") pod \"authentication-operator-69f744f599-mfdx4\" (UID: \"118f3348-2c20-44d1-96dc-03b2aa49cea2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.856459 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e45b169d-862b-4326-a005-063cead60ac4-serving-cert\") pod \"controller-manager-879f6c89f-wr5c4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.856489 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-config\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.856512 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8splh\" (UniqueName: \"kubernetes.io/projected/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-kube-api-access-8splh\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.858854 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-config\") pod \"controller-manager-879f6c89f-wr5c4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.859521 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-j254q"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.859767 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-client-ca\") pod \"controller-manager-879f6c89f-wr5c4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.862040 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/118f3348-2c20-44d1-96dc-03b2aa49cea2-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-mfdx4\" (UID: \"118f3348-2c20-44d1-96dc-03b2aa49cea2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.865690 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2wknp"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.865741 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.865910 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.866256 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-image-import-ca\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.866795 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/118f3348-2c20-44d1-96dc-03b2aa49cea2-service-ca-bundle\") pod \"authentication-operator-69f744f599-mfdx4\" (UID: \"118f3348-2c20-44d1-96dc-03b2aa49cea2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.867100 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/8ee870b9-12a2-466c-a4a2-697c9d8c9918-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vfk7d\" (UID: \"8ee870b9-12a2-466c-a4a2-697c9d8c9918\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.867361 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-etcd-serving-ca\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.867918 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-audit\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.868466 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-etcd-client\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.868919 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ee870b9-12a2-466c-a4a2-697c9d8c9918-serving-cert\") pod \"openshift-config-operator-7777fb866f-vfk7d\" (UID: \"8ee870b9-12a2-466c-a4a2-697c9d8c9918\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.869314 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-audit-dir\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.869398 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-node-pullsecrets\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.869869 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.870691 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-config\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.874670 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e45b169d-862b-4326-a005-063cead60ac4-serving-cert\") pod \"controller-manager-879f6c89f-wr5c4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.875713 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-trusted-ca-bundle\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.876370 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-serving-cert\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.877028 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/118f3348-2c20-44d1-96dc-03b2aa49cea2-serving-cert\") pod \"authentication-operator-69f744f599-mfdx4\" (UID: \"118f3348-2c20-44d1-96dc-03b2aa49cea2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" 
Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.877093 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-encryption-config\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.877368 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/118f3348-2c20-44d1-96dc-03b2aa49cea2-config\") pod \"authentication-operator-69f744f599-mfdx4\" (UID: \"118f3348-2c20-44d1-96dc-03b2aa49cea2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.878857 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.884993 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-wr5c4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.889231 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q5ndp"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.892495 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-46bpg"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.894082 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.895195 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-chqwl"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.896810 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-gxpvf"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.897848 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-gxpvf" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.898017 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.899789 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.900468 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.902494 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-5mmz6"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.904676 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-t47vq"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.906986 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-j254q"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.908392 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-sm66d"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.909558 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc"] Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.918572 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.939120 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.958852 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.979954 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 21 17:35:31 crc kubenswrapper[4799]: I0121 17:35:31.999744 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.019344 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.039647 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.058459 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.079152 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.099973 4799 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.119182 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.151160 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.163063 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.179205 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.199751 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.220357 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.239552 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.259060 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.280172 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.300179 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.319769 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.339105 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.358722 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.380561 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.399782 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.418954 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.439242 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.459381 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.479663 4799 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.499013 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.519280 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.547471 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.561998 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.579120 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.600000 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.624899 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.640179 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.659028 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.679870 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.699813 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.719715 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.740265 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.757105 4799 request.go:700] Waited for 1.008384621s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmcc-proxy-tls&limit=500&resourceVersion=0 Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.759875 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.779556 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.798715 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.818284 4799 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.841726 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.899671 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.919411 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.938935 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.959261 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.989079 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 21 17:35:32 crc kubenswrapper[4799]: I0121 17:35:32.998974 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.020323 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.040574 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.059712 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.080101 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.121483 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2j2hm\" (UniqueName: \"kubernetes.io/projected/38e1abaa-9da0-4924-a6b5-ee9617cf304d-kube-api-access-2j2hm\") pod \"machine-api-operator-5694c8668f-rk6k5\" (UID: \"38e1abaa-9da0-4924-a6b5-ee9617cf304d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.141093 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vhh9\" (UniqueName: \"kubernetes.io/projected/b0eabe68-5cf0-4bc2-8578-02f73622072b-kube-api-access-8vhh9\") pod \"package-server-manager-789f6589d5-ftnlt\" (UID: \"b0eabe68-5cf0-4bc2-8578-02f73622072b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.157823 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kp8cd\" (UniqueName: \"kubernetes.io/projected/9f4ff0b4-f3da-4cab-a054-970565d09713-kube-api-access-kp8cd\") pod \"kube-storage-version-migrator-operator-b67b599dd-s96kb\" (UID: \"9f4ff0b4-f3da-4cab-a054-970565d09713\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.159519 4799 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.174193 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pl99\" (UniqueName: \"kubernetes.io/projected/bb70641c-df77-4b31-bc8d-d996213797cc-kube-api-access-5pl99\") pod \"machine-approver-56656f9798-bwn6r\" (UID: \"bb70641c-df77-4b31-bc8d-d996213797cc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.194509 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7m27v\" (UniqueName: \"kubernetes.io/projected/9a6e3985-ef4a-451a-90cf-4b313527298c-kube-api-access-7m27v\") pod \"route-controller-manager-6576b87f9c-59kzj\" (UID: \"9a6e3985-ef4a-451a-90cf-4b313527298c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.215746 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2qm6\" (UniqueName: \"kubernetes.io/projected/14f1d8ff-d287-44f6-8427-2cc844cab8d1-kube-api-access-k2qm6\") pod \"cluster-samples-operator-665b6dd947-24jlh\" (UID: \"14f1d8ff-d287-44f6-8427-2cc844cab8d1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24jlh" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.219383 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.221096 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24jlh" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.239410 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.260307 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.273792 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:33 crc kubenswrapper[4799]: E0121 17:35:33.274104 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:37:35.274064907 +0000 UTC m=+281.900354930 (durationBeforeRetry 2m2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.275829 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.279478 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.300091 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.308704 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.319321 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.324841 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.339621 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.358958 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.375258 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.375323 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.379111 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.380870 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.399329 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.419878 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.427068 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.440610 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.460781 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.480011 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.504779 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.520149 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.539785 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.559491 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.577615 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.577688 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.580224 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.581102 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.582725 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: 
\"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.598755 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.620610 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.639639 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.640758 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.647350 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.674205 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8splh\" (UniqueName: \"kubernetes.io/projected/f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd-kube-api-access-8splh\") pod \"apiserver-76f77b778f-w2n7v\" (UID: \"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd\") " pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.690080 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.700035 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vw5m\" (UniqueName: \"kubernetes.io/projected/8ee870b9-12a2-466c-a4a2-697c9d8c9918-kube-api-access-8vw5m\") pod \"openshift-config-operator-7777fb866f-vfk7d\" (UID: \"8ee870b9-12a2-466c-a4a2-697c9d8c9918\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.708956 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.717422 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8k4l\" (UniqueName: \"kubernetes.io/projected/e45b169d-862b-4326-a005-063cead60ac4-kube-api-access-q8k4l\") pod \"controller-manager-879f6c89f-wr5c4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.737219 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpkdc\" (UniqueName: \"kubernetes.io/projected/118f3348-2c20-44d1-96dc-03b2aa49cea2-kube-api-access-mpkdc\") pod \"authentication-operator-69f744f599-mfdx4\" (UID: \"118f3348-2c20-44d1-96dc-03b2aa49cea2\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.739929 4799 
reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.760081 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.777646 4799 request.go:700] Waited for 1.885860726s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/hostpath-provisioner/configmaps?fieldSelector=metadata.name%3Dopenshift-service-ca.crt&limit=500&resourceVersion=0 Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.777816 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.783446 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.800081 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.818804 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.819835 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.841449 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885081 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxxmt\" (UniqueName: \"kubernetes.io/projected/c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e-kube-api-access-xxxmt\") pod \"catalog-operator-68c6474976-f8vdn\" (UID: \"c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885118 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/a3b37351-15c4-4cf3-8af5-1486009713a6-tmpfs\") pod \"packageserver-d55dfcdfc-56k4p\" (UID: \"a3b37351-15c4-4cf3-8af5-1486009713a6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885156 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885192 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mqws\" (UniqueName: \"kubernetes.io/projected/45703959-9502-44ab-a19e-19d702259346-kube-api-access-9mqws\") pod \"service-ca-operator-777779d784-chqwl\" (UID: 
\"45703959-9502-44ab-a19e-19d702259346\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-chqwl" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885217 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/dd4d0095-1e20-4fcf-937f-1351374f36c6-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-nzghk\" (UID: \"dd4d0095-1e20-4fcf-937f-1351374f36c6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nzghk" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885236 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e82402a-bf1e-418b-9ec3-7723300db21b-secret-volume\") pod \"collect-profiles-29483610-mgbzw\" (UID: \"9e82402a-bf1e-418b-9ec3-7723300db21b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885262 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e-profile-collector-cert\") pod \"catalog-operator-68c6474976-f8vdn\" (UID: \"c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885283 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-oauth-config\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885714 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885746 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f8c732f-a717-4c56-8415-06a4b74e3372-config\") pod \"openshift-apiserver-operator-796bbdcf4f-zjkpx\" (UID: \"2f8c732f-a717-4c56-8415-06a4b74e3372\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885775 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242-images\") pod \"machine-config-operator-74547568cd-tc5lz\" (UID: \"bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885794 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-registry-certificates\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885810 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mghkv\" (UniqueName: \"kubernetes.io/projected/d06abe7d-735c-46b1-b98a-f7ef020fe863-kube-api-access-mghkv\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885826 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnsrl\" (UniqueName: \"kubernetes.io/projected/bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242-kube-api-access-lnsrl\") pod \"machine-config-operator-74547568cd-tc5lz\" (UID: \"bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885842 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-audit-policies\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885862 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb895e61-5ea9-45d5-8145-1c82cb8da7bd-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b6hnk\" (UID: \"cb895e61-5ea9-45d5-8145-1c82cb8da7bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885877 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-config\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885895 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cb895e61-5ea9-45d5-8145-1c82cb8da7bd-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b6hnk\" (UID: \"cb895e61-5ea9-45d5-8145-1c82cb8da7bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885944 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/afc63db8-935e-43c5-952b-593f1b1e3350-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-7wbqr\" (UID: \"afc63db8-935e-43c5-952b-593f1b1e3350\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885970 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-bound-sa-token\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.885998 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886015 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886031 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45703959-9502-44ab-a19e-19d702259346-config\") pod \"service-ca-operator-777779d784-chqwl\" (UID: \"45703959-9502-44ab-a19e-19d702259346\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-chqwl" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886050 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acba609b-3b15-4514-9237-0d7b4faa356a-config\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886077 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-registry-tls\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886093 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/06fabbfb-ca52-4980-9478-5fbe09bca884-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2wknp\" (UID: \"06fabbfb-ca52-4980-9478-5fbe09bca884\") " pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886109 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a3b37351-15c4-4cf3-8af5-1486009713a6-apiservice-cert\") pod \"packageserver-d55dfcdfc-56k4p\" (UID: \"a3b37351-15c4-4cf3-8af5-1486009713a6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886140 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/1f9e0e11-7d5c-496f-bf3f-c78624ce6083-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-266kh\" (UID: \"1f9e0e11-7d5c-496f-bf3f-c78624ce6083\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886155 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-service-ca\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886199 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjz4n\" (UniqueName: \"kubernetes.io/projected/9e82402a-bf1e-418b-9ec3-7723300db21b-kube-api-access-bjz4n\") pod \"collect-profiles-29483610-mgbzw\" (UID: \"9e82402a-bf1e-418b-9ec3-7723300db21b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886217 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d06abe7d-735c-46b1-b98a-f7ef020fe863-audit-dir\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886233 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886250 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afc63db8-935e-43c5-952b-593f1b1e3350-config\") pod \"kube-apiserver-operator-766d6c64bb-7wbqr\" (UID: \"afc63db8-935e-43c5-952b-593f1b1e3350\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886266 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/acba609b-3b15-4514-9237-0d7b4faa356a-etcd-ca\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886279 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-ca-trust-extracted\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886296 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a66626d-aee3-4ac0-aa2a-1f8795c431ce-config\") pod 
\"console-operator-58897d9998-mr95r\" (UID: \"4a66626d-aee3-4ac0-aa2a-1f8795c431ce\") " pod="openshift-console-operator/console-operator-58897d9998-mr95r" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886311 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/acba609b-3b15-4514-9237-0d7b4faa356a-serving-cert\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886327 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-trusted-ca\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886342 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886365 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242-proxy-tls\") pod \"machine-config-operator-74547568cd-tc5lz\" (UID: \"bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886401 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/240455aa-026d-4291-a205-1451b6e0e397-srv-cert\") pod \"olm-operator-6b444d44fb-l9zvc\" (UID: \"240455aa-026d-4291-a205-1451b6e0e397\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.886417 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/acba609b-3b15-4514-9237-0d7b4faa356a-etcd-client\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.887817 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sh9r6\" (UniqueName: \"kubernetes.io/projected/46c59bb9-7544-496f-a38c-1054b3b95ae8-kube-api-access-sh9r6\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.887839 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/17204b6d-9470-46fc-996a-5aab9eaef223-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-l8wnp\" (UID: \"17204b6d-9470-46fc-996a-5aab9eaef223\") " 
pod="openshift-multus/multus-admission-controller-857f4d67dd-l8wnp" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.887867 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a3b37351-15c4-4cf3-8af5-1486009713a6-webhook-cert\") pod \"packageserver-d55dfcdfc-56k4p\" (UID: \"a3b37351-15c4-4cf3-8af5-1486009713a6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.887884 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xprdr\" (UniqueName: \"kubernetes.io/projected/a3b37351-15c4-4cf3-8af5-1486009713a6-kube-api-access-xprdr\") pod \"packageserver-d55dfcdfc-56k4p\" (UID: \"a3b37351-15c4-4cf3-8af5-1486009713a6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.887933 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.887953 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.887974 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1f9e0e11-7d5c-496f-bf3f-c78624ce6083-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-266kh\" (UID: \"1f9e0e11-7d5c-496f-bf3f-c78624ce6083\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.891212 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ncfc\" (UniqueName: \"kubernetes.io/projected/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-kube-api-access-5ncfc\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.891256 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-484w2\" (UniqueName: \"kubernetes.io/projected/06fabbfb-ca52-4980-9478-5fbe09bca884-kube-api-access-484w2\") pod \"marketplace-operator-79b997595-2wknp\" (UID: \"06fabbfb-ca52-4980-9478-5fbe09bca884\") " pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.891281 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mhbs\" (UniqueName: \"kubernetes.io/projected/240455aa-026d-4291-a205-1451b6e0e397-kube-api-access-7mhbs\") pod 
\"olm-operator-6b444d44fb-l9zvc\" (UID: \"240455aa-026d-4291-a205-1451b6e0e397\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.891350 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ae40d9a-9494-4907-8b3b-7d2dbff784fe-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-tdgbz\" (UID: \"9ae40d9a-9494-4907-8b3b-7d2dbff784fe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.891384 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-etcd-client\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.891402 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78bm7\" (UniqueName: \"kubernetes.io/projected/07c163b1-b21e-4905-944b-ea8f34437277-kube-api-access-78bm7\") pod \"migrator-59844c95c7-qs4j8\" (UID: \"07c163b1-b21e-4905-944b-ea8f34437277\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qs4j8" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.891429 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qjbm\" (UniqueName: \"kubernetes.io/projected/2f8c732f-a717-4c56-8415-06a4b74e3372-kube-api-access-6qjbm\") pod \"openshift-apiserver-operator-796bbdcf4f-zjkpx\" (UID: \"2f8c732f-a717-4c56-8415-06a4b74e3372\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.891657 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e-srv-cert\") pod \"catalog-operator-68c6474976-f8vdn\" (UID: \"c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.891724 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-audit-policies\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.891762 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/afc63db8-935e-43c5-952b-593f1b1e3350-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-7wbqr\" (UID: \"afc63db8-935e-43c5-952b-593f1b1e3350\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.891845 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z5mz\" (UniqueName: 
\"kubernetes.io/projected/1f9e0e11-7d5c-496f-bf3f-c78624ce6083-kube-api-access-7z5mz\") pod \"cluster-image-registry-operator-dc59b4c8b-266kh\" (UID: \"1f9e0e11-7d5c-496f-bf3f-c78624ce6083\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.891866 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-audit-dir\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.891882 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb895e61-5ea9-45d5-8145-1c82cb8da7bd-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b6hnk\" (UID: \"cb895e61-5ea9-45d5-8145-1c82cb8da7bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.891912 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64df8\" (UniqueName: \"kubernetes.io/projected/17204b6d-9470-46fc-996a-5aab9eaef223-kube-api-access-64df8\") pod \"multus-admission-controller-857f4d67dd-l8wnp\" (UID: \"17204b6d-9470-46fc-996a-5aab9eaef223\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-l8wnp" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.892243 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/740090e7-79a4-4e3c-b77d-7969f1d327d6-signing-key\") pod \"service-ca-9c57cc56f-46bpg\" (UID: \"740090e7-79a4-4e3c-b77d-7969f1d327d6\") " pod="openshift-service-ca/service-ca-9c57cc56f-46bpg" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.892494 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzp92\" (UniqueName: \"kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-kube-api-access-vzp92\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.892520 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/06fabbfb-ca52-4980-9478-5fbe09bca884-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2wknp\" (UID: \"06fabbfb-ca52-4980-9478-5fbe09bca884\") " pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.892552 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pj2sx\" (UniqueName: \"kubernetes.io/projected/ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a-kube-api-access-pj2sx\") pod \"downloads-7954f5f757-6cjlt\" (UID: \"ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a\") " pod="openshift-console/downloads-7954f5f757-6cjlt" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.892572 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-oauth-serving-cert\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.892892 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.892993 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.893023 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.893098 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lttdv\" (UniqueName: \"kubernetes.io/projected/9f43309b-1eef-471f-8359-d7a35b677818-kube-api-access-lttdv\") pod \"machine-config-controller-84d6567774-f7nd4\" (UID: \"9f43309b-1eef-471f-8359-d7a35b677818\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.893181 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a66626d-aee3-4ac0-aa2a-1f8795c431ce-serving-cert\") pod \"console-operator-58897d9998-mr95r\" (UID: \"4a66626d-aee3-4ac0-aa2a-1f8795c431ce\") " pod="openshift-console-operator/console-operator-58897d9998-mr95r" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.893408 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-encryption-config\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.893451 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242-auth-proxy-config\") pod \"machine-config-operator-74547568cd-tc5lz\" (UID: \"bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.893799 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plgfx\" (UniqueName: \"kubernetes.io/projected/dd4d0095-1e20-4fcf-937f-1351374f36c6-kube-api-access-plgfx\") pod \"control-plane-machine-set-operator-78cbb6b69f-nzghk\" (UID: \"dd4d0095-1e20-4fcf-937f-1351374f36c6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nzghk" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.893826 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/acba609b-3b15-4514-9237-0d7b4faa356a-etcd-service-ca\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.893887 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-serving-cert\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.893938 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.893956 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ae40d9a-9494-4907-8b3b-7d2dbff784fe-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-tdgbz\" (UID: \"9ae40d9a-9494-4907-8b3b-7d2dbff784fe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.893973 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9f43309b-1eef-471f-8359-d7a35b677818-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-f7nd4\" (UID: \"9f43309b-1eef-471f-8359-d7a35b677818\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.894506 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-installation-pull-secrets\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.894817 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhrpb\" (UniqueName: \"kubernetes.io/projected/9ae40d9a-9494-4907-8b3b-7d2dbff784fe-kube-api-access-zhrpb\") pod \"openshift-controller-manager-operator-756b6f6bc6-tdgbz\" (UID: \"9ae40d9a-9494-4907-8b3b-7d2dbff784fe\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.894843 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/240455aa-026d-4291-a205-1451b6e0e397-profile-collector-cert\") pod \"olm-operator-6b444d44fb-l9zvc\" (UID: \"240455aa-026d-4291-a205-1451b6e0e397\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.894923 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-serving-cert\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.894945 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f8c732f-a717-4c56-8415-06a4b74e3372-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-zjkpx\" (UID: \"2f8c732f-a717-4c56-8415-06a4b74e3372\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.895000 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqkwb\" (UniqueName: \"kubernetes.io/projected/4a66626d-aee3-4ac0-aa2a-1f8795c431ce-kube-api-access-jqkwb\") pod \"console-operator-58897d9998-mr95r\" (UID: \"4a66626d-aee3-4ac0-aa2a-1f8795c431ce\") " pod="openshift-console-operator/console-operator-58897d9998-mr95r" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.895073 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4a66626d-aee3-4ac0-aa2a-1f8795c431ce-trusted-ca\") pod \"console-operator-58897d9998-mr95r\" (UID: \"4a66626d-aee3-4ac0-aa2a-1f8795c431ce\") " pod="openshift-console-operator/console-operator-58897d9998-mr95r" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.895529 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e82402a-bf1e-418b-9ec3-7723300db21b-config-volume\") pod \"collect-profiles-29483610-mgbzw\" (UID: \"9e82402a-bf1e-418b-9ec3-7723300db21b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.895548 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1f9e0e11-7d5c-496f-bf3f-c78624ce6083-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-266kh\" (UID: \"1f9e0e11-7d5c-496f-bf3f-c78624ce6083\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.895587 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k822g\" (UniqueName: \"kubernetes.io/projected/740090e7-79a4-4e3c-b77d-7969f1d327d6-kube-api-access-k822g\") pod \"service-ca-9c57cc56f-46bpg\" (UID: 
\"740090e7-79a4-4e3c-b77d-7969f1d327d6\") " pod="openshift-service-ca/service-ca-9c57cc56f-46bpg" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.895624 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-trusted-ca-bundle\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.895651 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.895724 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/740090e7-79a4-4e3c-b77d-7969f1d327d6-signing-cabundle\") pod \"service-ca-9c57cc56f-46bpg\" (UID: \"740090e7-79a4-4e3c-b77d-7969f1d327d6\") " pod="openshift-service-ca/service-ca-9c57cc56f-46bpg" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.895746 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9f43309b-1eef-471f-8359-d7a35b677818-proxy-tls\") pod \"machine-config-controller-84d6567774-f7nd4\" (UID: \"9f43309b-1eef-471f-8359-d7a35b677818\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.895764 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45703959-9502-44ab-a19e-19d702259346-serving-cert\") pod \"service-ca-operator-777779d784-chqwl\" (UID: \"45703959-9502-44ab-a19e-19d702259346\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-chqwl" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.895862 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x29vf\" (UniqueName: \"kubernetes.io/projected/acba609b-3b15-4514-9237-0d7b4faa356a-kube-api-access-x29vf\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.895895 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:33 crc kubenswrapper[4799]: E0121 17:35:33.896667 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:34.396647916 +0000 UTC m=+161.022937989 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.918670 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.929928 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 21 17:35:33 crc kubenswrapper[4799]: I0121 17:35:33.945408 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r" event={"ID":"bb70641c-df77-4b31-bc8d-d996213797cc","Type":"ContainerStarted","Data":"2da5cea4fdfc65a5959c121f2bd5dfe1f2475c65e486090d0877ea2bab5253ec"} Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.131540 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.132433 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/740090e7-79a4-4e3c-b77d-7969f1d327d6-signing-cabundle\") pod \"service-ca-9c57cc56f-46bpg\" (UID: \"740090e7-79a4-4e3c-b77d-7969f1d327d6\") " pod="openshift-service-ca/service-ca-9c57cc56f-46bpg" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.132466 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9f43309b-1eef-471f-8359-d7a35b677818-proxy-tls\") pod \"machine-config-controller-84d6567774-f7nd4\" (UID: \"9f43309b-1eef-471f-8359-d7a35b677818\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.132539 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45703959-9502-44ab-a19e-19d702259346-serving-cert\") pod \"service-ca-operator-777779d784-chqwl\" (UID: \"45703959-9502-44ab-a19e-19d702259346\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-chqwl" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.132614 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x29vf\" (UniqueName: \"kubernetes.io/projected/acba609b-3b15-4514-9237-0d7b4faa356a-kube-api-access-x29vf\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.132681 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
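
The E0121 nestedpendingoperations error above is the one real failure in this stretch: MountVolume.MountDevice for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 fails because the kubelet has no registered CSI driver named kubevirt.io.hostpath-provisioner yet, and the UnmountVolume just started for the old pod 8f668bae-612b-4b75-9490-919e737c6a3b will hit the same wall below. The kubelet only dispatches CSI calls to node plugins that have announced themselves through its plugin-registration mechanism, so until the hostpath-provisioner plugin registers, every operation against that PVC fails fast at a registry lookup. A hypothetical sketch of such a lookup (illustrative; not the real k8s.io/kubernetes CSI client code):

    // Illustrative registry-lookup sketch. The map, socket path, and method
    // names are assumptions; only the error text mirrors the kubelet log above.
    package main

    import (
        "fmt"
        "sync"
    )

    type csiDriverRegistry struct {
        mu      sync.RWMutex
        drivers map[string]string // driver name -> node plugin socket
    }

    func (r *csiDriverRegistry) newClient(name string) (string, error) {
        r.mu.RLock()
        defer r.mu.RUnlock()
        sock, ok := r.drivers[name]
        if !ok {
            return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
        }
        return sock, nil
    }

    func main() {
        reg := &csiDriverRegistry{drivers: map[string]string{}}

        // Before the hostpath-provisioner node plugin registers, mounts fail fast:
        if _, err := reg.newClient("kubevirt.io.hostpath-provisioner"); err != nil {
            fmt.Println("MountDevice failed:", err)
        }

        // Once the plugin registers its socket, the same lookup succeeds and the
        // queued operations can be retried. (Hypothetical socket path.)
        reg.mu.Lock()
        reg.drivers["kubevirt.io.hostpath-provisioner"] = "/var/lib/kubelet/plugins/example/csi.sock"
        reg.mu.Unlock()
        if sock, err := reg.newClient("kubevirt.io.hostpath-provisioner"); err == nil {
            fmt.Println("driver registered at", sock)
        }
    }
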
\"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.132740 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxxmt\" (UniqueName: \"kubernetes.io/projected/c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e-kube-api-access-xxxmt\") pod \"catalog-operator-68c6474976-f8vdn\" (UID: \"c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.132767 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/a3b37351-15c4-4cf3-8af5-1486009713a6-tmpfs\") pod \"packageserver-d55dfcdfc-56k4p\" (UID: \"a3b37351-15c4-4cf3-8af5-1486009713a6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.132790 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.132837 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mqws\" (UniqueName: \"kubernetes.io/projected/45703959-9502-44ab-a19e-19d702259346-kube-api-access-9mqws\") pod \"service-ca-operator-777779d784-chqwl\" (UID: \"45703959-9502-44ab-a19e-19d702259346\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-chqwl" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.132867 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/dd4d0095-1e20-4fcf-937f-1351374f36c6-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-nzghk\" (UID: \"dd4d0095-1e20-4fcf-937f-1351374f36c6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nzghk" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.132918 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e82402a-bf1e-418b-9ec3-7723300db21b-secret-volume\") pod \"collect-profiles-29483610-mgbzw\" (UID: \"9e82402a-bf1e-418b-9ec3-7723300db21b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.132993 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e-profile-collector-cert\") pod \"catalog-operator-68c6474976-f8vdn\" (UID: \"c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.133024 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-oauth-config\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.133082 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.133109 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f8c732f-a717-4c56-8415-06a4b74e3372-config\") pod \"openshift-apiserver-operator-796bbdcf4f-zjkpx\" (UID: \"2f8c732f-a717-4c56-8415-06a4b74e3372\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.133176 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242-images\") pod \"machine-config-operator-74547568cd-tc5lz\" (UID: \"bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.133251 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-registry-certificates\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.133282 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mghkv\" (UniqueName: \"kubernetes.io/projected/d06abe7d-735c-46b1-b98a-f7ef020fe863-kube-api-access-mghkv\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.133341 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnsrl\" (UniqueName: \"kubernetes.io/projected/bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242-kube-api-access-lnsrl\") pod \"machine-config-operator-74547568cd-tc5lz\" (UID: \"bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.135576 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/740090e7-79a4-4e3c-b77d-7969f1d327d6-signing-cabundle\") pod \"service-ca-9c57cc56f-46bpg\" (UID: \"740090e7-79a4-4e3c-b77d-7969f1d327d6\") " pod="openshift-service-ca/service-ca-9c57cc56f-46bpg" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.135737 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: 
\"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.138542 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/a3b37351-15c4-4cf3-8af5-1486009713a6-tmpfs\") pod \"packageserver-d55dfcdfc-56k4p\" (UID: \"a3b37351-15c4-4cf3-8af5-1486009713a6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" Jan 21 17:35:34 crc kubenswrapper[4799]: E0121 17:35:34.140407 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:34.640317086 +0000 UTC m=+161.266607119 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.162700 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-audit-policies\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.162763 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb895e61-5ea9-45d5-8145-1c82cb8da7bd-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b6hnk\" (UID: \"cb895e61-5ea9-45d5-8145-1c82cb8da7bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.162825 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-config\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.163348 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f8c732f-a717-4c56-8415-06a4b74e3372-config\") pod \"openshift-apiserver-operator-796bbdcf4f-zjkpx\" (UID: \"2f8c732f-a717-4c56-8415-06a4b74e3372\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.168453 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cb895e61-5ea9-45d5-8145-1c82cb8da7bd-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b6hnk\" (UID: \"cb895e61-5ea9-45d5-8145-1c82cb8da7bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.168660 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/afc63db8-935e-43c5-952b-593f1b1e3350-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-7wbqr\" (UID: \"afc63db8-935e-43c5-952b-593f1b1e3350\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.169876 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-bound-sa-token\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.170017 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.170174 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.170287 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45703959-9502-44ab-a19e-19d702259346-config\") pod \"service-ca-operator-777779d784-chqwl\" (UID: \"45703959-9502-44ab-a19e-19d702259346\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-chqwl" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.170400 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acba609b-3b15-4514-9237-0d7b4faa356a-config\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.170551 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-registry-tls\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.170676 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/06fabbfb-ca52-4980-9478-5fbe09bca884-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2wknp\" (UID: \"06fabbfb-ca52-4980-9478-5fbe09bca884\") " pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.170798 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a3b37351-15c4-4cf3-8af5-1486009713a6-apiservice-cert\") pod \"packageserver-d55dfcdfc-56k4p\" (UID: 
\"a3b37351-15c4-4cf3-8af5-1486009713a6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.174688 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-audit-policies\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.177695 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-config\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.170896 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1f9e0e11-7d5c-496f-bf3f-c78624ce6083-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-266kh\" (UID: \"1f9e0e11-7d5c-496f-bf3f-c78624ce6083\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.178610 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-registry-certificates\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.179082 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-service-ca\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.179228 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/acba609b-3b15-4514-9237-0d7b4faa356a-etcd-ca\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.169171 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242-images\") pod \"machine-config-operator-74547568cd-tc5lz\" (UID: \"bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.180058 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-service-ca\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.180361 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/06fabbfb-ca52-4980-9478-5fbe09bca884-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2wknp\" (UID: \"06fabbfb-ca52-4980-9478-5fbe09bca884\") " pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.192708 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb895e61-5ea9-45d5-8145-1c82cb8da7bd-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b6hnk\" (UID: \"cb895e61-5ea9-45d5-8145-1c82cb8da7bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.192839 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9f43309b-1eef-471f-8359-d7a35b677818-proxy-tls\") pod \"machine-config-controller-84d6567774-f7nd4\" (UID: \"9f43309b-1eef-471f-8359-d7a35b677818\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.193236 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.193457 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/dd4d0095-1e20-4fcf-937f-1351374f36c6-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-nzghk\" (UID: \"dd4d0095-1e20-4fcf-937f-1351374f36c6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nzghk" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.193843 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.194388 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mqws\" (UniqueName: \"kubernetes.io/projected/45703959-9502-44ab-a19e-19d702259346-kube-api-access-9mqws\") pod \"service-ca-operator-777779d784-chqwl\" (UID: \"45703959-9502-44ab-a19e-19d702259346\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-chqwl" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.194797 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.200004 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mghkv\" (UniqueName: 
\"kubernetes.io/projected/d06abe7d-735c-46b1-b98a-f7ef020fe863-kube-api-access-mghkv\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.200502 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acba609b-3b15-4514-9237-0d7b4faa356a-config\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.180383 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjz4n\" (UniqueName: \"kubernetes.io/projected/9e82402a-bf1e-418b-9ec3-7723300db21b-kube-api-access-bjz4n\") pod \"collect-profiles-29483610-mgbzw\" (UID: \"9e82402a-bf1e-418b-9ec3-7723300db21b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.204862 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x29vf\" (UniqueName: \"kubernetes.io/projected/acba609b-3b15-4514-9237-0d7b4faa356a-kube-api-access-x29vf\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.181067 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/acba609b-3b15-4514-9237-0d7b4faa356a-etcd-ca\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.354523 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45703959-9502-44ab-a19e-19d702259346-serving-cert\") pod \"service-ca-operator-777779d784-chqwl\" (UID: \"45703959-9502-44ab-a19e-19d702259346\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-chqwl" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.355455 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d06abe7d-735c-46b1-b98a-f7ef020fe863-audit-dir\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.360597 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1f9e0e11-7d5c-496f-bf3f-c78624ce6083-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-266kh\" (UID: \"1f9e0e11-7d5c-496f-bf3f-c78624ce6083\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.364240 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d06abe7d-735c-46b1-b98a-f7ef020fe863-audit-dir\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.364380 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.364414 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afc63db8-935e-43c5-952b-593f1b1e3350-config\") pod \"kube-apiserver-operator-766d6c64bb-7wbqr\" (UID: \"afc63db8-935e-43c5-952b-593f1b1e3350\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.364504 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-ca-trust-extracted\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.364529 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a66626d-aee3-4ac0-aa2a-1f8795c431ce-config\") pod \"console-operator-58897d9998-mr95r\" (UID: \"4a66626d-aee3-4ac0-aa2a-1f8795c431ce\") " pod="openshift-console-operator/console-operator-58897d9998-mr95r" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.364551 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/acba609b-3b15-4514-9237-0d7b4faa356a-serving-cert\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.364587 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-trusted-ca\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.364670 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.364705 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242-proxy-tls\") pod \"machine-config-operator-74547568cd-tc5lz\" (UID: \"bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.364728 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sh9r6\" (UniqueName: \"kubernetes.io/projected/46c59bb9-7544-496f-a38c-1054b3b95ae8-kube-api-access-sh9r6\") pod \"console-f9d7485db-m875t\" (UID: 
\"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.364822 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnsrl\" (UniqueName: \"kubernetes.io/projected/bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242-kube-api-access-lnsrl\") pod \"machine-config-operator-74547568cd-tc5lz\" (UID: \"bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.365898 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afc63db8-935e-43c5-952b-593f1b1e3350-config\") pod \"kube-apiserver-operator-766d6c64bb-7wbqr\" (UID: \"afc63db8-935e-43c5-952b-593f1b1e3350\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.366309 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-trusted-ca\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.367807 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-ca-trust-extracted\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.368008 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.369112 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a66626d-aee3-4ac0-aa2a-1f8795c431ce-config\") pod \"console-operator-58897d9998-mr95r\" (UID: \"4a66626d-aee3-4ac0-aa2a-1f8795c431ce\") " pod="openshift-console-operator/console-operator-58897d9998-mr95r" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.369427 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/240455aa-026d-4291-a205-1451b6e0e397-srv-cert\") pod \"olm-operator-6b444d44fb-l9zvc\" (UID: \"240455aa-026d-4291-a205-1451b6e0e397\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.369468 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/acba609b-3b15-4514-9237-0d7b4faa356a-etcd-client\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.369532 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/1f9e0e11-7d5c-496f-bf3f-c78624ce6083-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-266kh\" (UID: \"1f9e0e11-7d5c-496f-bf3f-c78624ce6083\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.369566 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/17204b6d-9470-46fc-996a-5aab9eaef223-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-l8wnp\" (UID: \"17204b6d-9470-46fc-996a-5aab9eaef223\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-l8wnp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.371109 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-registry-tls\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.379001 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1f9e0e11-7d5c-496f-bf3f-c78624ce6083-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-266kh\" (UID: \"1f9e0e11-7d5c-496f-bf3f-c78624ce6083\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.382098 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a3b37351-15c4-4cf3-8af5-1486009713a6-apiservice-cert\") pod \"packageserver-d55dfcdfc-56k4p\" (UID: \"a3b37351-15c4-4cf3-8af5-1486009713a6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.382280 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-oauth-config\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.385635 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242-proxy-tls\") pod \"machine-config-operator-74547568cd-tc5lz\" (UID: \"bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.386399 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/acba609b-3b15-4514-9237-0d7b4faa356a-serving-cert\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.386547 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a3b37351-15c4-4cf3-8af5-1486009713a6-webhook-cert\") pod \"packageserver-d55dfcdfc-56k4p\" (UID: \"a3b37351-15c4-4cf3-8af5-1486009713a6\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.386920 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xprdr\" (UniqueName: \"kubernetes.io/projected/a3b37351-15c4-4cf3-8af5-1486009713a6-kube-api-access-xprdr\") pod \"packageserver-d55dfcdfc-56k4p\" (UID: \"a3b37351-15c4-4cf3-8af5-1486009713a6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.386970 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.387004 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.387895 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e-profile-collector-cert\") pod \"catalog-operator-68c6474976-f8vdn\" (UID: \"c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.388553 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.390979 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45703959-9502-44ab-a19e-19d702259346-config\") pod \"service-ca-operator-777779d784-chqwl\" (UID: \"45703959-9502-44ab-a19e-19d702259346\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-chqwl" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.391590 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/afc63db8-935e-43c5-952b-593f1b1e3350-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-7wbqr\" (UID: \"afc63db8-935e-43c5-952b-593f1b1e3350\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.391766 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-bound-sa-token\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 
17:35:34.391846 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ncfc\" (UniqueName: \"kubernetes.io/projected/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-kube-api-access-5ncfc\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.391938 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-484w2\" (UniqueName: \"kubernetes.io/projected/06fabbfb-ca52-4980-9478-5fbe09bca884-kube-api-access-484w2\") pod \"marketplace-operator-79b997595-2wknp\" (UID: \"06fabbfb-ca52-4980-9478-5fbe09bca884\") " pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.391999 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mhbs\" (UniqueName: \"kubernetes.io/projected/240455aa-026d-4291-a205-1451b6e0e397-kube-api-access-7mhbs\") pod \"olm-operator-6b444d44fb-l9zvc\" (UID: \"240455aa-026d-4291-a205-1451b6e0e397\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.392551 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e82402a-bf1e-418b-9ec3-7723300db21b-secret-volume\") pod \"collect-profiles-29483610-mgbzw\" (UID: \"9e82402a-bf1e-418b-9ec3-7723300db21b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.392627 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ae40d9a-9494-4907-8b3b-7d2dbff784fe-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-tdgbz\" (UID: \"9ae40d9a-9494-4907-8b3b-7d2dbff784fe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.392713 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-etcd-client\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.392734 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78bm7\" (UniqueName: \"kubernetes.io/projected/07c163b1-b21e-4905-944b-ea8f34437277-kube-api-access-78bm7\") pod \"migrator-59844c95c7-qs4j8\" (UID: \"07c163b1-b21e-4905-944b-ea8f34437277\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qs4j8" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.392752 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qjbm\" (UniqueName: \"kubernetes.io/projected/2f8c732f-a717-4c56-8415-06a4b74e3372-kube-api-access-6qjbm\") pod \"openshift-apiserver-operator-796bbdcf4f-zjkpx\" (UID: \"2f8c732f-a717-4c56-8415-06a4b74e3372\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.392796 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e-srv-cert\") pod \"catalog-operator-68c6474976-f8vdn\" (UID: \"c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.392816 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-audit-policies\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.392832 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/afc63db8-935e-43c5-952b-593f1b1e3350-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-7wbqr\" (UID: \"afc63db8-935e-43c5-952b-593f1b1e3350\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.392857 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z5mz\" (UniqueName: \"kubernetes.io/projected/1f9e0e11-7d5c-496f-bf3f-c78624ce6083-kube-api-access-7z5mz\") pod \"cluster-image-registry-operator-dc59b4c8b-266kh\" (UID: \"1f9e0e11-7d5c-496f-bf3f-c78624ce6083\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.392878 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-audit-dir\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.392895 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb895e61-5ea9-45d5-8145-1c82cb8da7bd-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b6hnk\" (UID: \"cb895e61-5ea9-45d5-8145-1c82cb8da7bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.392934 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64df8\" (UniqueName: \"kubernetes.io/projected/17204b6d-9470-46fc-996a-5aab9eaef223-kube-api-access-64df8\") pod \"multus-admission-controller-857f4d67dd-l8wnp\" (UID: \"17204b6d-9470-46fc-996a-5aab9eaef223\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-l8wnp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.392971 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/740090e7-79a4-4e3c-b77d-7969f1d327d6-signing-key\") pod \"service-ca-9c57cc56f-46bpg\" (UID: \"740090e7-79a4-4e3c-b77d-7969f1d327d6\") " pod="openshift-service-ca/service-ca-9c57cc56f-46bpg" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.392992 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzp92\" (UniqueName: \"kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-kube-api-access-vzp92\") pod \"image-registry-697d97f7c8-wqt42\" (UID: 
\"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393011 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/06fabbfb-ca52-4980-9478-5fbe09bca884-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2wknp\" (UID: \"06fabbfb-ca52-4980-9478-5fbe09bca884\") " pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393029 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pj2sx\" (UniqueName: \"kubernetes.io/projected/ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a-kube-api-access-pj2sx\") pod \"downloads-7954f5f757-6cjlt\" (UID: \"ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a\") " pod="openshift-console/downloads-7954f5f757-6cjlt" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393050 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-oauth-serving-cert\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393074 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393100 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393118 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393176 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lttdv\" (UniqueName: \"kubernetes.io/projected/9f43309b-1eef-471f-8359-d7a35b677818-kube-api-access-lttdv\") pod \"machine-config-controller-84d6567774-f7nd4\" (UID: \"9f43309b-1eef-471f-8359-d7a35b677818\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393203 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a66626d-aee3-4ac0-aa2a-1f8795c431ce-serving-cert\") pod \"console-operator-58897d9998-mr95r\" (UID: \"4a66626d-aee3-4ac0-aa2a-1f8795c431ce\") " 
pod="openshift-console-operator/console-operator-58897d9998-mr95r" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393229 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-encryption-config\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393254 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242-auth-proxy-config\") pod \"machine-config-operator-74547568cd-tc5lz\" (UID: \"bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393276 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plgfx\" (UniqueName: \"kubernetes.io/projected/dd4d0095-1e20-4fcf-937f-1351374f36c6-kube-api-access-plgfx\") pod \"control-plane-machine-set-operator-78cbb6b69f-nzghk\" (UID: \"dd4d0095-1e20-4fcf-937f-1351374f36c6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nzghk" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393293 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/acba609b-3b15-4514-9237-0d7b4faa356a-etcd-service-ca\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393313 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-serving-cert\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393330 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393346 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ae40d9a-9494-4907-8b3b-7d2dbff784fe-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-tdgbz\" (UID: \"9ae40d9a-9494-4907-8b3b-7d2dbff784fe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393362 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9f43309b-1eef-471f-8359-d7a35b677818-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-f7nd4\" (UID: \"9f43309b-1eef-471f-8359-d7a35b677818\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4" Jan 21 17:35:34 
crc kubenswrapper[4799]: I0121 17:35:34.393380 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-installation-pull-secrets\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393398 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhrpb\" (UniqueName: \"kubernetes.io/projected/9ae40d9a-9494-4907-8b3b-7d2dbff784fe-kube-api-access-zhrpb\") pod \"openshift-controller-manager-operator-756b6f6bc6-tdgbz\" (UID: \"9ae40d9a-9494-4907-8b3b-7d2dbff784fe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393414 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/240455aa-026d-4291-a205-1451b6e0e397-profile-collector-cert\") pod \"olm-operator-6b444d44fb-l9zvc\" (UID: \"240455aa-026d-4291-a205-1451b6e0e397\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393439 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-serving-cert\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393455 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f8c732f-a717-4c56-8415-06a4b74e3372-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-zjkpx\" (UID: \"2f8c732f-a717-4c56-8415-06a4b74e3372\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393471 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4a66626d-aee3-4ac0-aa2a-1f8795c431ce-trusted-ca\") pod \"console-operator-58897d9998-mr95r\" (UID: \"4a66626d-aee3-4ac0-aa2a-1f8795c431ce\") " pod="openshift-console-operator/console-operator-58897d9998-mr95r" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393489 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqkwb\" (UniqueName: \"kubernetes.io/projected/4a66626d-aee3-4ac0-aa2a-1f8795c431ce-kube-api-access-jqkwb\") pod \"console-operator-58897d9998-mr95r\" (UID: \"4a66626d-aee3-4ac0-aa2a-1f8795c431ce\") " pod="openshift-console-operator/console-operator-58897d9998-mr95r" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393540 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e82402a-bf1e-418b-9ec3-7723300db21b-config-volume\") pod \"collect-profiles-29483610-mgbzw\" (UID: \"9e82402a-bf1e-418b-9ec3-7723300db21b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393560 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1f9e0e11-7d5c-496f-bf3f-c78624ce6083-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-266kh\" (UID: \"1f9e0e11-7d5c-496f-bf3f-c78624ce6083\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393585 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k822g\" (UniqueName: \"kubernetes.io/projected/740090e7-79a4-4e3c-b77d-7969f1d327d6-kube-api-access-k822g\") pod \"service-ca-9c57cc56f-46bpg\" (UID: \"740090e7-79a4-4e3c-b77d-7969f1d327d6\") " pod="openshift-service-ca/service-ca-9c57cc56f-46bpg" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393603 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-trusted-ca-bundle\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.394867 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-trusted-ca-bundle\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.396919 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ae40d9a-9494-4907-8b3b-7d2dbff784fe-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-tdgbz\" (UID: \"9ae40d9a-9494-4907-8b3b-7d2dbff784fe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.398800 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.399675 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.400300 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/240455aa-026d-4291-a205-1451b6e0e397-srv-cert\") pod \"olm-operator-6b444d44fb-l9zvc\" (UID: \"240455aa-026d-4291-a205-1451b6e0e397\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.401560 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: 
\"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.402017 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-audit-policies\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.402151 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/17204b6d-9470-46fc-996a-5aab9eaef223-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-l8wnp\" (UID: \"17204b6d-9470-46fc-996a-5aab9eaef223\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-l8wnp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.402932 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-oauth-serving-cert\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.403916 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-etcd-client\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.409315 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e-srv-cert\") pod \"catalog-operator-68c6474976-f8vdn\" (UID: \"c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.409939 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a3b37351-15c4-4cf3-8af5-1486009713a6-webhook-cert\") pod \"packageserver-d55dfcdfc-56k4p\" (UID: \"a3b37351-15c4-4cf3-8af5-1486009713a6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.410166 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-audit-dir\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.410651 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/acba609b-3b15-4514-9237-0d7b4faa356a-etcd-client\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.411277 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb895e61-5ea9-45d5-8145-1c82cb8da7bd-config\") pod 
\"openshift-kube-scheduler-operator-5fdd9b5758-b6hnk\" (UID: \"cb895e61-5ea9-45d5-8145-1c82cb8da7bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.413013 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.393252 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxxmt\" (UniqueName: \"kubernetes.io/projected/c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e-kube-api-access-xxxmt\") pod \"catalog-operator-68c6474976-f8vdn\" (UID: \"c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.424357 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e82402a-bf1e-418b-9ec3-7723300db21b-config-volume\") pod \"collect-profiles-29483610-mgbzw\" (UID: \"9e82402a-bf1e-418b-9ec3-7723300db21b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.424868 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/740090e7-79a4-4e3c-b77d-7969f1d327d6-signing-key\") pod \"service-ca-9c57cc56f-46bpg\" (UID: \"740090e7-79a4-4e3c-b77d-7969f1d327d6\") " pod="openshift-service-ca/service-ca-9c57cc56f-46bpg" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.425036 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/acba609b-3b15-4514-9237-0d7b4faa356a-etcd-service-ca\") pod \"etcd-operator-b45778765-q5ndp\" (UID: \"acba609b-3b15-4514-9237-0d7b4faa356a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.427891 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9f43309b-1eef-471f-8359-d7a35b677818-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-f7nd4\" (UID: \"9f43309b-1eef-471f-8359-d7a35b677818\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.428610 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-installation-pull-secrets\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.429259 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2f8c732f-a717-4c56-8415-06a4b74e3372-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-zjkpx\" (UID: \"2f8c732f-a717-4c56-8415-06a4b74e3372\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.433009 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/240455aa-026d-4291-a205-1451b6e0e397-profile-collector-cert\") pod \"olm-operator-6b444d44fb-l9zvc\" (UID: \"240455aa-026d-4291-a205-1451b6e0e397\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.455117 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-serving-cert\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.462515 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ae40d9a-9494-4907-8b3b-7d2dbff784fe-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-tdgbz\" (UID: \"9ae40d9a-9494-4907-8b3b-7d2dbff784fe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.463058 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a66626d-aee3-4ac0-aa2a-1f8795c431ce-serving-cert\") pod \"console-operator-58897d9998-mr95r\" (UID: \"4a66626d-aee3-4ac0-aa2a-1f8795c431ce\") " pod="openshift-console-operator/console-operator-58897d9998-mr95r" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.463451 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242-auth-proxy-config\") pod \"machine-config-operator-74547568cd-tc5lz\" (UID: \"bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.465377 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4a66626d-aee3-4ac0-aa2a-1f8795c431ce-trusted-ca\") pod \"console-operator-58897d9998-mr95r\" (UID: \"4a66626d-aee3-4ac0-aa2a-1f8795c431ce\") " pod="openshift-console-operator/console-operator-58897d9998-mr95r" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.482895 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-serving-cert\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.483517 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.483735 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/06fabbfb-ca52-4980-9478-5fbe09bca884-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2wknp\" (UID: \"06fabbfb-ca52-4980-9478-5fbe09bca884\") " pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.483740 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.486880 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjz4n\" (UniqueName: \"kubernetes.io/projected/9e82402a-bf1e-418b-9ec3-7723300db21b-kube-api-access-bjz4n\") pod \"collect-profiles-29483610-mgbzw\" (UID: \"9e82402a-bf1e-418b-9ec3-7723300db21b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.488280 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-rk6k5"] Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.488334 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24jlh"] Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.490930 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sh9r6\" (UniqueName: \"kubernetes.io/projected/46c59bb9-7544-496f-a38c-1054b3b95ae8-kube-api-access-sh9r6\") pod \"console-f9d7485db-m875t\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.492844 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78bm7\" (UniqueName: \"kubernetes.io/projected/07c163b1-b21e-4905-944b-ea8f34437277-kube-api-access-78bm7\") pod \"migrator-59844c95c7-qs4j8\" (UID: \"07c163b1-b21e-4905-944b-ea8f34437277\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qs4j8" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.493067 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/afc63db8-935e-43c5-952b-593f1b1e3350-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-7wbqr\" (UID: \"afc63db8-935e-43c5-952b-593f1b1e3350\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.493386 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64df8\" (UniqueName: \"kubernetes.io/projected/17204b6d-9470-46fc-996a-5aab9eaef223-kube-api-access-64df8\") pod \"multus-admission-controller-857f4d67dd-l8wnp\" (UID: \"17204b6d-9470-46fc-996a-5aab9eaef223\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-l8wnp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.493878 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z5mz\" (UniqueName: \"kubernetes.io/projected/1f9e0e11-7d5c-496f-bf3f-c78624ce6083-kube-api-access-7z5mz\") pod \"cluster-image-registry-operator-dc59b4c8b-266kh\" (UID: 
\"1f9e0e11-7d5c-496f-bf3f-c78624ce6083\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.494650 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9hpz\" (UniqueName: \"kubernetes.io/projected/0d624414-fb5b-4553-a695-f2f233248e13-kube-api-access-w9hpz\") pod \"dns-operator-744455d44c-5mmz6\" (UID: \"0d624414-fb5b-4553-a695-f2f233248e13\") " pod="openshift-dns-operator/dns-operator-744455d44c-5mmz6" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.494688 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c90c1c25-29e5-416a-af94-db168bd239b0-default-certificate\") pod \"router-default-5444994796-q6bfd\" (UID: \"c90c1c25-29e5-416a-af94-db168bd239b0\") " pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.494709 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1c11c6a2-f364-43c0-8bbc-a0bb360795e1-metrics-tls\") pod \"ingress-operator-5b745b69d9-jnshm\" (UID: \"1c11c6a2-f364-43c0-8bbc-a0bb360795e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.494731 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s955r\" (UniqueName: \"kubernetes.io/projected/97d657de-2fc1-4ed1-b0a8-2b239049c70d-kube-api-access-s955r\") pod \"dns-default-sm66d\" (UID: \"97d657de-2fc1-4ed1-b0a8-2b239049c70d\") " pod="openshift-dns/dns-default-sm66d" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.494757 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/fddb0541-77a5-4db7-8d2a-0b8e94488823-socket-dir\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.494774 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c90c1c25-29e5-416a-af94-db168bd239b0-service-ca-bundle\") pod \"router-default-5444994796-q6bfd\" (UID: \"c90c1c25-29e5-416a-af94-db168bd239b0\") " pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.494799 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64900be6-8be4-4cd4-8b14-68dfab26a71c-config\") pod \"kube-controller-manager-operator-78b949d7b-csxlf\" (UID: \"64900be6-8be4-4cd4-8b14-68dfab26a71c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.494821 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1c11c6a2-f364-43c0-8bbc-a0bb360795e1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jnshm\" (UID: \"1c11c6a2-f364-43c0-8bbc-a0bb360795e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" Jan 
21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.494832 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xprdr\" (UniqueName: \"kubernetes.io/projected/a3b37351-15c4-4cf3-8af5-1486009713a6-kube-api-access-xprdr\") pod \"packageserver-d55dfcdfc-56k4p\" (UID: \"a3b37351-15c4-4cf3-8af5-1486009713a6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.494858 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nt256\" (UniqueName: \"kubernetes.io/projected/fddb0541-77a5-4db7-8d2a-0b8e94488823-kube-api-access-nt256\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.494936 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-484w2\" (UniqueName: \"kubernetes.io/projected/06fabbfb-ca52-4980-9478-5fbe09bca884-kube-api-access-484w2\") pod \"marketplace-operator-79b997595-2wknp\" (UID: \"06fabbfb-ca52-4980-9478-5fbe09bca884\") " pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.495062 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1c11c6a2-f364-43c0-8bbc-a0bb360795e1-trusted-ca\") pod \"ingress-operator-5b745b69d9-jnshm\" (UID: \"1c11c6a2-f364-43c0-8bbc-a0bb360795e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.495111 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfnjb\" (UniqueName: \"kubernetes.io/projected/1c11c6a2-f364-43c0-8bbc-a0bb360795e1-kube-api-access-qfnjb\") pod \"ingress-operator-5b745b69d9-jnshm\" (UID: \"1c11c6a2-f364-43c0-8bbc-a0bb360795e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.495173 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d7f42c73-0135-4720-b94a-a903f5971266-certs\") pod \"machine-config-server-gxpvf\" (UID: \"d7f42c73-0135-4720-b94a-a903f5971266\") " pod="openshift-machine-config-operator/machine-config-server-gxpvf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.495193 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0d624414-fb5b-4553-a695-f2f233248e13-metrics-tls\") pod \"dns-operator-744455d44c-5mmz6\" (UID: \"0d624414-fb5b-4553-a695-f2f233248e13\") " pod="openshift-dns-operator/dns-operator-744455d44c-5mmz6" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.495265 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64900be6-8be4-4cd4-8b14-68dfab26a71c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-csxlf\" (UID: \"64900be6-8be4-4cd4-8b14-68dfab26a71c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.495523 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5ncfc\" (UniqueName: \"kubernetes.io/projected/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-kube-api-access-5ncfc\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.495923 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94gkw\" (UniqueName: \"kubernetes.io/projected/d7f42c73-0135-4720-b94a-a903f5971266-kube-api-access-94gkw\") pod \"machine-config-server-gxpvf\" (UID: \"d7f42c73-0135-4720-b94a-a903f5971266\") " pod="openshift-machine-config-operator/machine-config-server-gxpvf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.496029 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d7f42c73-0135-4720-b94a-a903f5971266-node-bootstrap-token\") pod \"machine-config-server-gxpvf\" (UID: \"d7f42c73-0135-4720-b94a-a903f5971266\") " pod="openshift-machine-config-operator/machine-config-server-gxpvf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.496059 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltf7w\" (UniqueName: \"kubernetes.io/projected/c90c1c25-29e5-416a-af94-db168bd239b0-kube-api-access-ltf7w\") pod \"router-default-5444994796-q6bfd\" (UID: \"c90c1c25-29e5-416a-af94-db168bd239b0\") " pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.496093 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/fddb0541-77a5-4db7-8d2a-0b8e94488823-registration-dir\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.496195 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c90c1c25-29e5-416a-af94-db168bd239b0-stats-auth\") pod \"router-default-5444994796-q6bfd\" (UID: \"c90c1c25-29e5-416a-af94-db168bd239b0\") " pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.496218 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/97d657de-2fc1-4ed1-b0a8-2b239049c70d-config-volume\") pod \"dns-default-sm66d\" (UID: \"97d657de-2fc1-4ed1-b0a8-2b239049c70d\") " pod="openshift-dns/dns-default-sm66d" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.496240 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/fddb0541-77a5-4db7-8d2a-0b8e94488823-plugins-dir\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.496267 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c90c1c25-29e5-416a-af94-db168bd239b0-metrics-certs\") pod 
\"router-default-5444994796-q6bfd\" (UID: \"c90c1c25-29e5-416a-af94-db168bd239b0\") " pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.496296 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.496388 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6d2c8c63-3efc-4ace-9715-0c04fb63a94c-cert\") pod \"ingress-canary-t47vq\" (UID: \"6d2c8c63-3efc-4ace-9715-0c04fb63a94c\") " pod="openshift-ingress-canary/ingress-canary-t47vq" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.496428 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/fddb0541-77a5-4db7-8d2a-0b8e94488823-mountpoint-dir\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.496447 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/fddb0541-77a5-4db7-8d2a-0b8e94488823-csi-data-dir\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.496467 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64900be6-8be4-4cd4-8b14-68dfab26a71c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-csxlf\" (UID: \"64900be6-8be4-4cd4-8b14-68dfab26a71c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.496486 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmqdj\" (UniqueName: \"kubernetes.io/projected/6d2c8c63-3efc-4ace-9715-0c04fb63a94c-kube-api-access-cmqdj\") pod \"ingress-canary-t47vq\" (UID: \"6d2c8c63-3efc-4ace-9715-0c04fb63a94c\") " pod="openshift-ingress-canary/ingress-canary-t47vq" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.496507 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/97d657de-2fc1-4ed1-b0a8-2b239049c70d-metrics-tls\") pod \"dns-default-sm66d\" (UID: \"97d657de-2fc1-4ed1-b0a8-2b239049c70d\") " pod="openshift-dns/dns-default-sm66d" Jan 21 17:35:34 crc kubenswrapper[4799]: E0121 17:35:34.496890 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:34.996875992 +0000 UTC m=+161.623166015 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.497536 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhrpb\" (UniqueName: \"kubernetes.io/projected/9ae40d9a-9494-4907-8b3b-7d2dbff784fe-kube-api-access-zhrpb\") pod \"openshift-controller-manager-operator-756b6f6bc6-tdgbz\" (UID: \"9ae40d9a-9494-4907-8b3b-7d2dbff784fe\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.497749 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lttdv\" (UniqueName: \"kubernetes.io/projected/9f43309b-1eef-471f-8359-d7a35b677818-kube-api-access-lttdv\") pod \"machine-config-controller-84d6567774-f7nd4\" (UID: \"9f43309b-1eef-471f-8359-d7a35b677818\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.507341 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qjbm\" (UniqueName: \"kubernetes.io/projected/2f8c732f-a717-4c56-8415-06a4b74e3372-kube-api-access-6qjbm\") pod \"openshift-apiserver-operator-796bbdcf4f-zjkpx\" (UID: \"2f8c732f-a717-4c56-8415-06a4b74e3372\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.512473 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cb895e61-5ea9-45d5-8145-1c82cb8da7bd-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-b6hnk\" (UID: \"cb895e61-5ea9-45d5-8145-1c82cb8da7bd\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.513524 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k822g\" (UniqueName: \"kubernetes.io/projected/740090e7-79a4-4e3c-b77d-7969f1d327d6-kube-api-access-k822g\") pod \"service-ca-9c57cc56f-46bpg\" (UID: \"740090e7-79a4-4e3c-b77d-7969f1d327d6\") " pod="openshift-service-ca/service-ca-9c57cc56f-46bpg" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.516788 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9-encryption-config\") pod \"apiserver-7bbb656c7d-8zkr4\" (UID: \"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.517088 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-96sxw\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") " pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc 
kubenswrapper[4799]: I0121 17:35:34.524099 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqkwb\" (UniqueName: \"kubernetes.io/projected/4a66626d-aee3-4ac0-aa2a-1f8795c431ce-kube-api-access-jqkwb\") pod \"console-operator-58897d9998-mr95r\" (UID: \"4a66626d-aee3-4ac0-aa2a-1f8795c431ce\") " pod="openshift-console-operator/console-operator-58897d9998-mr95r" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.543441 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.543975 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plgfx\" (UniqueName: \"kubernetes.io/projected/dd4d0095-1e20-4fcf-937f-1351374f36c6-kube-api-access-plgfx\") pod \"control-plane-machine-set-operator-78cbb6b69f-nzghk\" (UID: \"dd4d0095-1e20-4fcf-937f-1351374f36c6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nzghk" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.545689 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mhbs\" (UniqueName: \"kubernetes.io/projected/240455aa-026d-4291-a205-1451b6e0e397-kube-api-access-7mhbs\") pod \"olm-operator-6b444d44fb-l9zvc\" (UID: \"240455aa-026d-4291-a205-1451b6e0e397\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.548442 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.550644 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1f9e0e11-7d5c-496f-bf3f-c78624ce6083-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-266kh\" (UID: \"1f9e0e11-7d5c-496f-bf3f-c78624ce6083\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.571633 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzp92\" (UniqueName: \"kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-kube-api-access-vzp92\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.593334 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pj2sx\" (UniqueName: \"kubernetes.io/projected/ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a-kube-api-access-pj2sx\") pod \"downloads-7954f5f757-6cjlt\" (UID: \"ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a\") " pod="openshift-console/downloads-7954f5f757-6cjlt" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.599906 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.600315 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx" Jan 21 17:35:34 crc kubenswrapper[4799]: E0121 17:35:34.600360 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:35.100330838 +0000 UTC m=+161.726620861 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.611056 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nzghk" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613535 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d7f42c73-0135-4720-b94a-a903f5971266-node-bootstrap-token\") pod \"machine-config-server-gxpvf\" (UID: \"d7f42c73-0135-4720-b94a-a903f5971266\") " pod="openshift-machine-config-operator/machine-config-server-gxpvf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613575 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltf7w\" (UniqueName: \"kubernetes.io/projected/c90c1c25-29e5-416a-af94-db168bd239b0-kube-api-access-ltf7w\") pod \"router-default-5444994796-q6bfd\" (UID: \"c90c1c25-29e5-416a-af94-db168bd239b0\") " pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613599 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/fddb0541-77a5-4db7-8d2a-0b8e94488823-registration-dir\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613648 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c90c1c25-29e5-416a-af94-db168bd239b0-stats-auth\") pod \"router-default-5444994796-q6bfd\" (UID: \"c90c1c25-29e5-416a-af94-db168bd239b0\") " pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613664 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/97d657de-2fc1-4ed1-b0a8-2b239049c70d-config-volume\") pod \"dns-default-sm66d\" (UID: \"97d657de-2fc1-4ed1-b0a8-2b239049c70d\") " pod="openshift-dns/dns-default-sm66d" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613679 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/fddb0541-77a5-4db7-8d2a-0b8e94488823-plugins-dir\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 
21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613696 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c90c1c25-29e5-416a-af94-db168bd239b0-metrics-certs\") pod \"router-default-5444994796-q6bfd\" (UID: \"c90c1c25-29e5-416a-af94-db168bd239b0\") " pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613719 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613752 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6d2c8c63-3efc-4ace-9715-0c04fb63a94c-cert\") pod \"ingress-canary-t47vq\" (UID: \"6d2c8c63-3efc-4ace-9715-0c04fb63a94c\") " pod="openshift-ingress-canary/ingress-canary-t47vq" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613781 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/fddb0541-77a5-4db7-8d2a-0b8e94488823-mountpoint-dir\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613809 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/fddb0541-77a5-4db7-8d2a-0b8e94488823-csi-data-dir\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613833 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64900be6-8be4-4cd4-8b14-68dfab26a71c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-csxlf\" (UID: \"64900be6-8be4-4cd4-8b14-68dfab26a71c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613860 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmqdj\" (UniqueName: \"kubernetes.io/projected/6d2c8c63-3efc-4ace-9715-0c04fb63a94c-kube-api-access-cmqdj\") pod \"ingress-canary-t47vq\" (UID: \"6d2c8c63-3efc-4ace-9715-0c04fb63a94c\") " pod="openshift-ingress-canary/ingress-canary-t47vq" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613877 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/97d657de-2fc1-4ed1-b0a8-2b239049c70d-metrics-tls\") pod \"dns-default-sm66d\" (UID: \"97d657de-2fc1-4ed1-b0a8-2b239049c70d\") " pod="openshift-dns/dns-default-sm66d" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613899 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9hpz\" (UniqueName: \"kubernetes.io/projected/0d624414-fb5b-4553-a695-f2f233248e13-kube-api-access-w9hpz\") pod \"dns-operator-744455d44c-5mmz6\" (UID: 
\"0d624414-fb5b-4553-a695-f2f233248e13\") " pod="openshift-dns-operator/dns-operator-744455d44c-5mmz6" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613919 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c90c1c25-29e5-416a-af94-db168bd239b0-default-certificate\") pod \"router-default-5444994796-q6bfd\" (UID: \"c90c1c25-29e5-416a-af94-db168bd239b0\") " pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613946 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1c11c6a2-f364-43c0-8bbc-a0bb360795e1-metrics-tls\") pod \"ingress-operator-5b745b69d9-jnshm\" (UID: \"1c11c6a2-f364-43c0-8bbc-a0bb360795e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.613976 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s955r\" (UniqueName: \"kubernetes.io/projected/97d657de-2fc1-4ed1-b0a8-2b239049c70d-kube-api-access-s955r\") pod \"dns-default-sm66d\" (UID: \"97d657de-2fc1-4ed1-b0a8-2b239049c70d\") " pod="openshift-dns/dns-default-sm66d" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.614024 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/fddb0541-77a5-4db7-8d2a-0b8e94488823-socket-dir\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.614049 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c90c1c25-29e5-416a-af94-db168bd239b0-service-ca-bundle\") pod \"router-default-5444994796-q6bfd\" (UID: \"c90c1c25-29e5-416a-af94-db168bd239b0\") " pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.614081 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64900be6-8be4-4cd4-8b14-68dfab26a71c-config\") pod \"kube-controller-manager-operator-78b949d7b-csxlf\" (UID: \"64900be6-8be4-4cd4-8b14-68dfab26a71c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf" Jan 21 17:35:34 crc kubenswrapper[4799]: E0121 17:35:34.618278 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:35.118260203 +0000 UTC m=+161.744550226 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.614117 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1c11c6a2-f364-43c0-8bbc-a0bb360795e1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jnshm\" (UID: \"1c11c6a2-f364-43c0-8bbc-a0bb360795e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.618709 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/fddb0541-77a5-4db7-8d2a-0b8e94488823-mountpoint-dir\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.618763 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nt256\" (UniqueName: \"kubernetes.io/projected/fddb0541-77a5-4db7-8d2a-0b8e94488823-kube-api-access-nt256\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.618800 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1c11c6a2-f364-43c0-8bbc-a0bb360795e1-trusted-ca\") pod \"ingress-operator-5b745b69d9-jnshm\" (UID: \"1c11c6a2-f364-43c0-8bbc-a0bb360795e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.618819 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfnjb\" (UniqueName: \"kubernetes.io/projected/1c11c6a2-f364-43c0-8bbc-a0bb360795e1-kube-api-access-qfnjb\") pod \"ingress-operator-5b745b69d9-jnshm\" (UID: \"1c11c6a2-f364-43c0-8bbc-a0bb360795e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.618852 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d7f42c73-0135-4720-b94a-a903f5971266-certs\") pod \"machine-config-server-gxpvf\" (UID: \"d7f42c73-0135-4720-b94a-a903f5971266\") " pod="openshift-machine-config-operator/machine-config-server-gxpvf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.618872 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0d624414-fb5b-4553-a695-f2f233248e13-metrics-tls\") pod \"dns-operator-744455d44c-5mmz6\" (UID: \"0d624414-fb5b-4553-a695-f2f233248e13\") " pod="openshift-dns-operator/dns-operator-744455d44c-5mmz6" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.618893 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64900be6-8be4-4cd4-8b14-68dfab26a71c-serving-cert\") pod 
\"kube-controller-manager-operator-78b949d7b-csxlf\" (UID: \"64900be6-8be4-4cd4-8b14-68dfab26a71c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.618913 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94gkw\" (UniqueName: \"kubernetes.io/projected/d7f42c73-0135-4720-b94a-a903f5971266-kube-api-access-94gkw\") pod \"machine-config-server-gxpvf\" (UID: \"d7f42c73-0135-4720-b94a-a903f5971266\") " pod="openshift-machine-config-operator/machine-config-server-gxpvf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.619651 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/fddb0541-77a5-4db7-8d2a-0b8e94488823-socket-dir\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.620853 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c90c1c25-29e5-416a-af94-db168bd239b0-metrics-certs\") pod \"router-default-5444994796-q6bfd\" (UID: \"c90c1c25-29e5-416a-af94-db168bd239b0\") " pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.621587 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/97d657de-2fc1-4ed1-b0a8-2b239049c70d-config-volume\") pod \"dns-default-sm66d\" (UID: \"97d657de-2fc1-4ed1-b0a8-2b239049c70d\") " pod="openshift-dns/dns-default-sm66d" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.621647 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/fddb0541-77a5-4db7-8d2a-0b8e94488823-plugins-dir\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.623000 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1c11c6a2-f364-43c0-8bbc-a0bb360795e1-trusted-ca\") pod \"ingress-operator-5b745b69d9-jnshm\" (UID: \"1c11c6a2-f364-43c0-8bbc-a0bb360795e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.623723 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c90c1c25-29e5-416a-af94-db168bd239b0-service-ca-bundle\") pod \"router-default-5444994796-q6bfd\" (UID: \"c90c1c25-29e5-416a-af94-db168bd239b0\") " pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.626463 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.627108 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/fddb0541-77a5-4db7-8d2a-0b8e94488823-csi-data-dir\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.627156 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-mr95r" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.627640 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/fddb0541-77a5-4db7-8d2a-0b8e94488823-registration-dir\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.628118 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64900be6-8be4-4cd4-8b14-68dfab26a71c-config\") pod \"kube-controller-manager-operator-78b949d7b-csxlf\" (UID: \"64900be6-8be4-4cd4-8b14-68dfab26a71c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.629097 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d7f42c73-0135-4720-b94a-a903f5971266-node-bootstrap-token\") pod \"machine-config-server-gxpvf\" (UID: \"d7f42c73-0135-4720-b94a-a903f5971266\") " pod="openshift-machine-config-operator/machine-config-server-gxpvf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.631446 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0d624414-fb5b-4553-a695-f2f233248e13-metrics-tls\") pod \"dns-operator-744455d44c-5mmz6\" (UID: \"0d624414-fb5b-4553-a695-f2f233248e13\") " pod="openshift-dns-operator/dns-operator-744455d44c-5mmz6" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.631506 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qs4j8" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.632111 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64900be6-8be4-4cd4-8b14-68dfab26a71c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-csxlf\" (UID: \"64900be6-8be4-4cd4-8b14-68dfab26a71c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.632793 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/97d657de-2fc1-4ed1-b0a8-2b239049c70d-metrics-tls\") pod \"dns-default-sm66d\" (UID: \"97d657de-2fc1-4ed1-b0a8-2b239049c70d\") " pod="openshift-dns/dns-default-sm66d" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.635687 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c90c1c25-29e5-416a-af94-db168bd239b0-default-certificate\") pod \"router-default-5444994796-q6bfd\" (UID: \"c90c1c25-29e5-416a-af94-db168bd239b0\") " pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.636537 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d7f42c73-0135-4720-b94a-a903f5971266-certs\") pod \"machine-config-server-gxpvf\" (UID: \"d7f42c73-0135-4720-b94a-a903f5971266\") " pod="openshift-machine-config-operator/machine-config-server-gxpvf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.640618 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c90c1c25-29e5-416a-af94-db168bd239b0-stats-auth\") pod \"router-default-5444994796-q6bfd\" (UID: \"c90c1c25-29e5-416a-af94-db168bd239b0\") " pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.641889 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94gkw\" (UniqueName: \"kubernetes.io/projected/d7f42c73-0135-4720-b94a-a903f5971266-kube-api-access-94gkw\") pod \"machine-config-server-gxpvf\" (UID: \"d7f42c73-0135-4720-b94a-a903f5971266\") " pod="openshift-machine-config-operator/machine-config-server-gxpvf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.643040 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.643192 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1c11c6a2-f364-43c0-8bbc-a0bb360795e1-metrics-tls\") pod \"ingress-operator-5b745b69d9-jnshm\" (UID: \"1c11c6a2-f364-43c0-8bbc-a0bb360795e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.644436 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-w2n7v"] Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.651296 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6d2c8c63-3efc-4ace-9715-0c04fb63a94c-cert\") pod \"ingress-canary-t47vq\" (UID: \"6d2c8c63-3efc-4ace-9715-0c04fb63a94c\") " pod="openshift-ingress-canary/ingress-canary-t47vq" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.659775 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.666861 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.669598 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-chqwl" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.672437 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfnjb\" (UniqueName: \"kubernetes.io/projected/1c11c6a2-f364-43c0-8bbc-a0bb360795e1-kube-api-access-qfnjb\") pod \"ingress-operator-5b745b69d9-jnshm\" (UID: \"1c11c6a2-f364-43c0-8bbc-a0bb360795e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.680675 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.682116 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt"] Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.686938 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.687094 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1c11c6a2-f364-43c0-8bbc-a0bb360795e1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jnshm\" (UID: \"1c11c6a2-f364-43c0-8bbc-a0bb360795e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.694805 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.702839 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.705421 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s955r\" (UniqueName: \"kubernetes.io/projected/97d657de-2fc1-4ed1-b0a8-2b239049c70d-kube-api-access-s955r\") pod \"dns-default-sm66d\" (UID: \"97d657de-2fc1-4ed1-b0a8-2b239049c70d\") " pod="openshift-dns/dns-default-sm66d" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.711202 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.722086 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-l8wnp" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.729271 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:34 crc kubenswrapper[4799]: E0121 17:35:34.729819 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:35.229787618 +0000 UTC m=+161.856077641 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.735119 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.739414 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltf7w\" (UniqueName: \"kubernetes.io/projected/c90c1c25-29e5-416a-af94-db168bd239b0-kube-api-access-ltf7w\") pod \"router-default-5444994796-q6bfd\" (UID: \"c90c1c25-29e5-416a-af94-db168bd239b0\") " pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.744944 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.755055 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.764214 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.773442 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.780990 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-46bpg" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.827495 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.830654 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-sm66d" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.832623 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: E0121 17:35:34.833114 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:35.333093719 +0000 UTC m=+161.959383742 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.868260 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-gxpvf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.869324 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-6cjlt" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.871423 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64900be6-8be4-4cd4-8b14-68dfab26a71c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-csxlf\" (UID: \"64900be6-8be4-4cd4-8b14-68dfab26a71c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.879299 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmqdj\" (UniqueName: \"kubernetes.io/projected/6d2c8c63-3efc-4ace-9715-0c04fb63a94c-kube-api-access-cmqdj\") pod \"ingress-canary-t47vq\" (UID: \"6d2c8c63-3efc-4ace-9715-0c04fb63a94c\") " pod="openshift-ingress-canary/ingress-canary-t47vq" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.885467 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9hpz\" (UniqueName: \"kubernetes.io/projected/0d624414-fb5b-4553-a695-f2f233248e13-kube-api-access-w9hpz\") pod \"dns-operator-744455d44c-5mmz6\" (UID: \"0d624414-fb5b-4553-a695-f2f233248e13\") " pod="openshift-dns-operator/dns-operator-744455d44c-5mmz6" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.886173 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nt256\" (UniqueName: \"kubernetes.io/projected/fddb0541-77a5-4db7-8d2a-0b8e94488823-kube-api-access-nt256\") pod \"csi-hostpathplugin-j254q\" (UID: \"fddb0541-77a5-4db7-8d2a-0b8e94488823\") " pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.897190 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wr5c4"] Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.933935 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:34 crc kubenswrapper[4799]: E0121 17:35:34.934335 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:35.434258643 +0000 UTC m=+162.060548666 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:34 crc kubenswrapper[4799]: I0121 17:35:34.938905 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:34 crc kubenswrapper[4799]: E0121 17:35:34.939396 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:35.439377662 +0000 UTC m=+162.065667685 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.068399 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:35 crc kubenswrapper[4799]: E0121 17:35:35.069333 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:35.569279728 +0000 UTC m=+162.195569751 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.091228 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb"] Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.092783 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf" Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.103369 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r" event={"ID":"bb70641c-df77-4b31-bc8d-d996213797cc","Type":"ContainerStarted","Data":"309521325e44ec590c3397bf6a594ddf73cb8033078e74665adb4664e80026c1"} Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.104681 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj"] Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.105579 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-5mmz6" Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.125345 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-t47vq" Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.135159 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5" event={"ID":"38e1abaa-9da0-4924-a6b5-ee9617cf304d","Type":"ContainerStarted","Data":"6a7a21cc4be98b122b5ed27022cb20941df8edb42f6a84421caad4645c290c78"} Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.141712 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24jlh" event={"ID":"14f1d8ff-d287-44f6-8427-2cc844cab8d1","Type":"ContainerStarted","Data":"a963beb2ba0b3c217743e1a2e034784cfc5ff24b694cf5ca604bc90633d74a98"} Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.149860 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-j254q" Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.187878 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-mfdx4"] Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.195332 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:35 crc kubenswrapper[4799]: E0121 17:35:35.233337 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:35.733314812 +0000 UTC m=+162.359604835 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.255419 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d"] Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.305503 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:35 crc kubenswrapper[4799]: E0121 17:35:35.306782 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:35.806751518 +0000 UTC m=+162.433041541 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:35 crc kubenswrapper[4799]: W0121 17:35:35.355896 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod118f3348_2c20_44d1_96dc_03b2aa49cea2.slice/crio-063d356ba5da045783830b10195f9e483cddbedf4a2f1dae7a6c5e4be03525e7 WatchSource:0}: Error finding container 063d356ba5da045783830b10195f9e483cddbedf4a2f1dae7a6c5e4be03525e7: Status 404 returned error can't find the container with id 063d356ba5da045783830b10195f9e483cddbedf4a2f1dae7a6c5e4be03525e7 Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.409308 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:35 crc kubenswrapper[4799]: E0121 17:35:35.409676 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:35.909661738 +0000 UTC m=+162.535951761 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:35 crc kubenswrapper[4799]: W0121 17:35:35.522720 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ee870b9_12a2_466c_a4a2_697c9d8c9918.slice/crio-5df746647b806922cb83750825e92069d03ac02de8f35019f93140bf2af1fd23 WatchSource:0}: Error finding container 5df746647b806922cb83750825e92069d03ac02de8f35019f93140bf2af1fd23: Status 404 returned error can't find the container with id 5df746647b806922cb83750825e92069d03ac02de8f35019f93140bf2af1fd23 Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.523457 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:35 crc kubenswrapper[4799]: E0121 17:35:35.523917 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:36.023895269 +0000 UTC m=+162.650185292 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.625199 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:35 crc kubenswrapper[4799]: E0121 17:35:35.625772 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:36.125753885 +0000 UTC m=+162.752043908 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.726475 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-m875t"] Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.736920 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:35 crc kubenswrapper[4799]: E0121 17:35:35.737353 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:36.237329053 +0000 UTC m=+162.863619076 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.844549 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:35 crc kubenswrapper[4799]: E0121 17:35:35.845144 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:36.345109202 +0000 UTC m=+162.971399225 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.946461 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:35 crc kubenswrapper[4799]: E0121 17:35:35.947173 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:36.447106972 +0000 UTC m=+163.073396995 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.962468 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nzghk"] Jan 21 17:35:35 crc kubenswrapper[4799]: I0121 17:35:35.967295 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx"] Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:35.998150 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mr95r"] Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.037472 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn"] Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.048313 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:36 crc kubenswrapper[4799]: E0121 17:35:36.048727 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:36.548715231 +0000 UTC m=+163.175005254 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.049006 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4"] Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.057604 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk"] Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.083792 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc"] Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.150024 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:36 crc kubenswrapper[4799]: E0121 17:35:36.150185 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:36.650152043 +0000 UTC m=+163.276442066 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.151478 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:36 crc kubenswrapper[4799]: E0121 17:35:36.152029 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:36.652008731 +0000 UTC m=+163.278298944 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.176316 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt" event={"ID":"b0eabe68-5cf0-4bc2-8578-02f73622072b","Type":"ContainerStarted","Data":"199135f2b25da9388a6b296a13d3cf8e49bfd5b3ff2061627cc7329ff57a4b5a"} Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.185084 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" event={"ID":"e45b169d-862b-4326-a005-063cead60ac4","Type":"ContainerStarted","Data":"e434a7b39b86229a24a1e5ad0b85ce42f00e1748bc040e4be64cb04e2b2881e3"} Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.186235 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" event={"ID":"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd","Type":"ContainerStarted","Data":"390f100aadc60dd97c36d0af78b23e1b093e8200cc1870a7e3204997d99ef707"} Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.186840 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" event={"ID":"9a6e3985-ef4a-451a-90cf-4b313527298c","Type":"ContainerStarted","Data":"a6519b631dfef1b7bd0eab44a0fa56980796090daf66a2b25172289a90e1d851"} Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.189104 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5" event={"ID":"38e1abaa-9da0-4924-a6b5-ee9617cf304d","Type":"ContainerStarted","Data":"380d483540e55bbeebecd87c81927c4b2aacdc9dd3990f66c2fd4e758811b22f"} Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.191403 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"05c27abdb071deabab0fdc8dd955d80ced9da132cded7b7455aaf6bc55c8bdad"} Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.193607 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" event={"ID":"118f3348-2c20-44d1-96dc-03b2aa49cea2","Type":"ContainerStarted","Data":"063d356ba5da045783830b10195f9e483cddbedf4a2f1dae7a6c5e4be03525e7"} Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.252153 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:36 crc kubenswrapper[4799]: E0121 17:35:36.252463 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-21 17:35:36.752438942 +0000 UTC m=+163.378728965 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:36 crc kubenswrapper[4799]: W0121 17:35:36.321249 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f8c732f_a717_4c56_8415_06a4b74e3372.slice/crio-1d895e50910a6a58cb0cc2ff5e07fa6cce10aaae1488f3f6c839a14395e15d14 WatchSource:0}: Error finding container 1d895e50910a6a58cb0cc2ff5e07fa6cce10aaae1488f3f6c839a14395e15d14: Status 404 returned error can't find the container with id 1d895e50910a6a58cb0cc2ff5e07fa6cce10aaae1488f3f6c839a14395e15d14 Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.354041 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:36 crc kubenswrapper[4799]: E0121 17:35:36.354876 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:36.854850405 +0000 UTC m=+163.481140478 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.434921 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r" event={"ID":"bb70641c-df77-4b31-bc8d-d996213797cc","Type":"ContainerStarted","Data":"e66a9d88ccfc19fff4bbda7ba160d147af35514c550987326080b1a16d166f51"} Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.434980 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"ffa4cfe45e8dc37cbbb9b6281b3d544d32113f8910addd11649c1b8b79565f0b"} Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.435002 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-q6bfd" event={"ID":"c90c1c25-29e5-416a-af94-db168bd239b0","Type":"ContainerStarted","Data":"59d3d6cf83c279f5903e61899fc08afa3ae4571fa8ac304cceed526e8771ceb1"} Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.435018 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" event={"ID":"8ee870b9-12a2-466c-a4a2-697c9d8c9918","Type":"ContainerStarted","Data":"5df746647b806922cb83750825e92069d03ac02de8f35019f93140bf2af1fd23"} Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.435032 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-gxpvf" event={"ID":"d7f42c73-0135-4720-b94a-a903f5971266","Type":"ContainerStarted","Data":"e50f004526cbd4a03f326840d7201418f5b50ede7fe17065fcd58e12b759c3cd"} Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.435044 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb" event={"ID":"9f4ff0b4-f3da-4cab-a054-970565d09713","Type":"ContainerStarted","Data":"88c0541d41ec4fcb9b42daae2918d367f971ff5d35a9256ae131b4b6d52b56e1"} Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.435055 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-m875t" event={"ID":"46c59bb9-7544-496f-a38c-1054b3b95ae8","Type":"ContainerStarted","Data":"22a071ac9915e4283b81d441ac7a164a56ac8dacc8418c9fbcc85a06c6be238e"} Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.435068 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"d7cae210dbc3c569f90da6a0dd8aeb1761b95b1697c12b2c6d852687ef23a282"} Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.455204 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:36 crc kubenswrapper[4799]: E0121 17:35:36.456075 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:36.955959838 +0000 UTC m=+163.582249851 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.610810 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:36 crc kubenswrapper[4799]: E0121 17:35:36.611257 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:37.111240609 +0000 UTC m=+163.737530632 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.630018 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-bwn6r" podStartSLOduration=135.629979319 podStartE2EDuration="2m15.629979319s" podCreationTimestamp="2026-01-21 17:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:36.627283096 +0000 UTC m=+163.253573119" watchObservedRunningTime="2026-01-21 17:35:36.629979319 +0000 UTC m=+163.256269352" Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.712214 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:36 crc kubenswrapper[4799]: E0121 17:35:36.712779 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:37.212742004 +0000 UTC m=+163.839032027 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.713049 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:36 crc kubenswrapper[4799]: E0121 17:35:36.713558 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:37.213541458 +0000 UTC m=+163.839831481 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.727874 4799 csr.go:261] certificate signing request csr-rklnk is approved, waiting to be issued Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.739096 4799 csr.go:257] certificate signing request csr-rklnk is issued Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.814043 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:36 crc kubenswrapper[4799]: E0121 17:35:36.814476 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:37.314434974 +0000 UTC m=+163.940725007 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.814544 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:36 crc kubenswrapper[4799]: E0121 17:35:36.815018 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:37.315009412 +0000 UTC m=+163.941299435 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.915851 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:36 crc kubenswrapper[4799]: E0121 17:35:36.916408 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:37.416357022 +0000 UTC m=+164.042647065 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:36 crc kubenswrapper[4799]: I0121 17:35:36.916611 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:36 crc kubenswrapper[4799]: E0121 17:35:36.917111 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:37.417100165 +0000 UTC m=+164.043390188 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.017464 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:37 crc kubenswrapper[4799]: E0121 17:35:37.017677 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:37.51763941 +0000 UTC m=+164.143929433 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.018105 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:37 crc kubenswrapper[4799]: E0121 17:35:37.018548 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:37.518540528 +0000 UTC m=+164.144830551 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.119269 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:37 crc kubenswrapper[4799]: E0121 17:35:37.119496 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:37.619453684 +0000 UTC m=+164.245743707 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.120039 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:37 crc kubenswrapper[4799]: E0121 17:35:37.120502 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:37.620493536 +0000 UTC m=+164.246783559 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.220894 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:37 crc kubenswrapper[4799]: E0121 17:35:37.221377 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:37.721354461 +0000 UTC m=+164.347644484 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.327787 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:37 crc kubenswrapper[4799]: E0121 17:35:37.328410 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:37.828392938 +0000 UTC m=+164.454682971 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.331085 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc" event={"ID":"240455aa-026d-4291-a205-1451b6e0e397","Type":"ContainerStarted","Data":"f8a758c47f632da475f4465fee59fbb4a2c80b2268c6ec08b6a274add4889b54"} Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.378793 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx" event={"ID":"2f8c732f-a717-4c56-8415-06a4b74e3372","Type":"ContainerStarted","Data":"1d895e50910a6a58cb0cc2ff5e07fa6cce10aaae1488f3f6c839a14395e15d14"} Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.384820 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"4e6fd1ba0e2227d53533f8b7684b41a045456dbdeb59a4403d033d8210081051"} Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.385149 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.394387 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nzghk" event={"ID":"dd4d0095-1e20-4fcf-937f-1351374f36c6","Type":"ContainerStarted","Data":"62d09877942b076404214dc75fbfad380639c6643bd3f3fe62ff94e7284258b4"} Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.400447 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" 
event={"ID":"e45b169d-862b-4326-a005-063cead60ac4","Type":"ContainerStarted","Data":"a5fe3335ba15b075b24ff8e9dc89c954cf198f86cd90c4d25b200e10413d6bfe"} Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.403850 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.405063 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" event={"ID":"c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e","Type":"ContainerStarted","Data":"8e508e9e406d790de0d0e2ccc416bbd0f58f4cdc12094bd0221413c93488425f"} Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.417179 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" event={"ID":"118f3348-2c20-44d1-96dc-03b2aa49cea2","Type":"ContainerStarted","Data":"df4e932580e87b02f03440baa874a61c03fc54fc2e3da0023ed0ca6810c28c2e"} Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.425930 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk" event={"ID":"cb895e61-5ea9-45d5-8145-1c82cb8da7bd","Type":"ContainerStarted","Data":"577a09f48858822e38e7691f37cfa2642e87155ffa3763c7a09a72dfa8f34b21"} Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.427046 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" event={"ID":"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9","Type":"ContainerStarted","Data":"60747a7d7e4af17b88491189519af785efa147236df3b62776cc23072a5755fc"} Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.428585 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:37 crc kubenswrapper[4799]: E0121 17:35:37.429067 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:37.929047626 +0000 UTC m=+164.555337649 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.429247 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt" event={"ID":"b0eabe68-5cf0-4bc2-8578-02f73622072b","Type":"ContainerStarted","Data":"08157d454f5f702e29711ec00dd15638d3bb55e1c7e165e6bf22dbe1a9d158a5"} Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.433742 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-mr95r" event={"ID":"4a66626d-aee3-4ac0-aa2a-1f8795c431ce","Type":"ContainerStarted","Data":"fecc3b18646d74580fc6bf8d4790840ff206360c8a70ce5f5180098ed5b7f7ac"} Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.450425 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24jlh" event={"ID":"14f1d8ff-d287-44f6-8427-2cc844cab8d1","Type":"ContainerStarted","Data":"104868b401b084feaac8b122af7bca078546578e7c9aaeae83d8cacf98e94725"} Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.474938 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-mfdx4" podStartSLOduration=136.474914247 podStartE2EDuration="2m16.474914247s" podCreationTimestamp="2026-01-21 17:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:37.438080466 +0000 UTC m=+164.064370509" watchObservedRunningTime="2026-01-21 17:35:37.474914247 +0000 UTC m=+164.101204270" Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.530859 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:37 crc kubenswrapper[4799]: E0121 17:35:37.531440 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:38.031409717 +0000 UTC m=+164.657699740 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.645427 4799 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-wr5c4 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.645601 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" podUID="e45b169d-862b-4326-a005-063cead60ac4" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused" Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.647753 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:37 crc kubenswrapper[4799]: E0121 17:35:37.648008 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:38.147970289 +0000 UTC m=+164.774260312 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.648095 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:37 crc kubenswrapper[4799]: E0121 17:35:37.648525 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:38.148517656 +0000 UTC m=+164.774807679 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.743617 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-21 17:30:36 +0000 UTC, rotation deadline is 2026-10-15 21:37:06.893313161 +0000 UTC Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.743664 4799 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6412h1m29.149653488s for next certificate rotation Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.750914 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:37 crc kubenswrapper[4799]: E0121 17:35:37.751540 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:38.251519217 +0000 UTC m=+164.877809240 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.856007 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:37 crc kubenswrapper[4799]: E0121 17:35:37.856553 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:38.3565286 +0000 UTC m=+164.982818623 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:37 crc kubenswrapper[4799]: I0121 17:35:37.956818 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:37 crc kubenswrapper[4799]: E0121 17:35:37.957101 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:38.457056585 +0000 UTC m=+165.083346648 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.089707 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:38 crc kubenswrapper[4799]: E0121 17:35:38.090994 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:38.590943473 +0000 UTC m=+165.217233496 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.317573 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:38 crc kubenswrapper[4799]: E0121 17:35:38.320387 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:38.82033001 +0000 UTC m=+165.446620053 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.394766 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" podStartSLOduration=136.394716224 podStartE2EDuration="2m16.394716224s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:37.475438173 +0000 UTC m=+164.101728196" watchObservedRunningTime="2026-01-21 17:35:38.394716224 +0000 UTC m=+165.021006257" Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.402454 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p"] Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.417811 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-chqwl"] Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.421698 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:38 crc kubenswrapper[4799]: E0121 17:35:38.422328 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:38.922312749 +0000 UTC m=+165.548602772 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.456655 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-46bpg"] Jan 21 17:35:38 crc kubenswrapper[4799]: W0121 17:35:38.482612 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda3b37351_15c4_4cf3_8af5_1486009713a6.slice/crio-98c37469457a9055728bf72367c40351ba1ced75d4dbae4b206eae3b17bd1a88 WatchSource:0}: Error finding container 98c37469457a9055728bf72367c40351ba1ced75d4dbae4b206eae3b17bd1a88: Status 404 returned error can't find the container with id 98c37469457a9055728bf72367c40351ba1ced75d4dbae4b206eae3b17bd1a88 Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.512732 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz"] Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.519343 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz"] Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.537323 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-l8wnp"] Jan 21 17:35:38 crc kubenswrapper[4799]: E0121 17:35:38.528233 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:39.02820801 +0000 UTC m=+165.654498033 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.528064 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.540520 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.552455 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-6cjlt"] Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.553240 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh"] Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.559912 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-96sxw"] Jan 21 17:35:38 crc kubenswrapper[4799]: E0121 17:35:38.562758 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:39.062725939 +0000 UTC m=+165.689015962 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.567494 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qs4j8"] Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.570713 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-mr95r" event={"ID":"4a66626d-aee3-4ac0-aa2a-1f8795c431ce","Type":"ContainerStarted","Data":"0341c62e339ac4412456df1d055cc562fbb8f612c06ea6dcc2fcf1f2452f2e88"} Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.572117 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-mr95r" Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.583398 4799 patch_prober.go:28] interesting pod/console-operator-58897d9998-mr95r container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.28:8443/readyz\": dial tcp 10.217.0.28:8443: connect: connection refused" start-of-body= Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.583481 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-mr95r" podUID="4a66626d-aee3-4ac0-aa2a-1f8795c431ce" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.28:8443/readyz\": dial tcp 10.217.0.28:8443: connect: connection refused" Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.583975 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr"] Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.585551 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw"] Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.587738 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q5ndp"] Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.592757 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4"] Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.683056 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:38 crc kubenswrapper[4799]: E0121 17:35:38.685296 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:39.185244215 +0000 UTC m=+165.811534418 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.689577 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-5mmz6"] Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.712512 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nzghk" event={"ID":"dd4d0095-1e20-4fcf-937f-1351374f36c6","Type":"ContainerStarted","Data":"13ae4ef3bed80f30f056a194d6278dfdea6a22c2cf97a12f9b0253382c06d8d4"} Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.717068 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf"] Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.745813 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-mr95r" podStartSLOduration=137.74575789 podStartE2EDuration="2m17.74575789s" podCreationTimestamp="2026-01-21 17:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:38.72864147 +0000 UTC m=+165.354931503" watchObservedRunningTime="2026-01-21 17:35:38.74575789 +0000 UTC m=+165.372047913" Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.750781 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-t47vq"] Jan 21 17:35:38 crc kubenswrapper[4799]: W0121 17:35:38.761911 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f43309b_1eef_471f_8359_d7a35b677818.slice/crio-482676bfd756b42dcc947a4dfaa3723da19d9b8ec1ef42f41113e5ff08c3b3ec WatchSource:0}: Error finding container 482676bfd756b42dcc947a4dfaa3723da19d9b8ec1ef42f41113e5ff08c3b3ec: Status 404 returned error can't find the container with id 482676bfd756b42dcc947a4dfaa3723da19d9b8ec1ef42f41113e5ff08c3b3ec Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.763137 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-j254q"] Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.801011 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" event={"ID":"9a6e3985-ef4a-451a-90cf-4b313527298c","Type":"ContainerStarted","Data":"0f1c18ab009fa3ca61cdd425023b76d4bf4daeb6727c73153354a1032db2c838"} Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.802005 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.803434 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.803671 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nzghk" podStartSLOduration=136.803646794 podStartE2EDuration="2m16.803646794s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:38.789609789 +0000 UTC m=+165.415899822" watchObservedRunningTime="2026-01-21 17:35:38.803646794 +0000 UTC m=+165.429936817" Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.819659 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2wknp"] Jan 21 17:35:38 crc kubenswrapper[4799]: E0121 17:35:38.823191 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:39.323174459 +0000 UTC m=+165.949464482 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:38 crc kubenswrapper[4799]: W0121 17:35:38.843683 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d2c8c63_3efc_4ace_9715_0c04fb63a94c.slice/crio-595c9f596293b5b190b952d4da448b92b0c6b34a48c8773fcd8c5263d919146e WatchSource:0}: Error finding container 595c9f596293b5b190b952d4da448b92b0c6b34a48c8773fcd8c5263d919146e: Status 404 returned error can't find the container with id 595c9f596293b5b190b952d4da448b92b0c6b34a48c8773fcd8c5263d919146e Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.853348 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm"] Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.856342 4799 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-59kzj container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" start-of-body= Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.856437 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" podUID="9a6e3985-ef4a-451a-90cf-4b313527298c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.872275 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-sm66d"] Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.873883 4799 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" podStartSLOduration=136.873846199 podStartE2EDuration="2m16.873846199s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:38.841145275 +0000 UTC m=+165.467435308" watchObservedRunningTime="2026-01-21 17:35:38.873846199 +0000 UTC m=+165.500136212" Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.903535 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24jlh" podStartSLOduration=137.903500707 podStartE2EDuration="2m17.903500707s" podCreationTimestamp="2026-01-21 17:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:38.872140326 +0000 UTC m=+165.498430359" watchObservedRunningTime="2026-01-21 17:35:38.903500707 +0000 UTC m=+165.529790730" Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.910295 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-q6bfd" event={"ID":"c90c1c25-29e5-416a-af94-db168bd239b0","Type":"ContainerStarted","Data":"dc00347774a38ae13aac824220b00c43e1e046933c5c72c5f0e31fdc9ce09933"} Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.911593 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:38 crc kubenswrapper[4799]: E0121 17:35:38.911799 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:39.411747442 +0000 UTC m=+166.038037465 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.912669 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:38 crc kubenswrapper[4799]: E0121 17:35:38.914616 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:39.414605021 +0000 UTC m=+166.040895044 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.942695 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-gxpvf" podStartSLOduration=7.942669061 podStartE2EDuration="7.942669061s" podCreationTimestamp="2026-01-21 17:35:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:38.909909245 +0000 UTC m=+165.536199268" watchObservedRunningTime="2026-01-21 17:35:38.942669061 +0000 UTC m=+165.568959084" Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.943338 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"d878e29f62541296e515183af9335e0893e8ab7dafe1a37a16cb49c52a3ab849"} Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.983046 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"4ddf77e4481f7b25234a9f1e5aac6c370ddd4d6b92ffdc28300b26c389a052ac"} Jan 21 17:35:38 crc kubenswrapper[4799]: I0121 17:35:38.994432 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-q6bfd" podStartSLOduration=136.994391103 podStartE2EDuration="2m16.994391103s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:38.964533618 +0000 UTC m=+165.590823641" watchObservedRunningTime="2026-01-21 17:35:38.994391103 +0000 UTC m=+165.620681126" Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.020752 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:39 crc kubenswrapper[4799]: E0121 17:35:39.022421 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:39.522394481 +0000 UTC m=+166.148684504 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.032636 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-chqwl" event={"ID":"45703959-9502-44ab-a19e-19d702259346","Type":"ContainerStarted","Data":"574ffe550373237967260f15bc350d1578ffe5a51c9443a8f0a5c32027b2ecb5"} Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.079289 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb" event={"ID":"9f4ff0b4-f3da-4cab-a054-970565d09713","Type":"ContainerStarted","Data":"619045f5720714b30c6c7c8d4ad06f6e6cc306a061cd852118a8ae3019823365"} Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.100003 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx" event={"ID":"2f8c732f-a717-4c56-8415-06a4b74e3372","Type":"ContainerStarted","Data":"d2f493317d5f71819d4295c57008cc1c646669758a30fd7c3e946db5b23cc007"} Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.123296 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:39 crc kubenswrapper[4799]: E0121 17:35:39.124035 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:39.624014139 +0000 UTC m=+166.250304162 (durationBeforeRetry 500ms). 
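[Annotation] Note that the same PVC is simultaneously being unmounted for pod UID 8f668bae-612b-4b75-9490-919e737c6a3b (an old pod) and mounted for pod UID d3d55c34-d00c-4bc2-81e9-f65f2201a5c3 (the replacement image-registry pod): the volume manager reconciles an actual-state world against a desired-state world, and here both directions block on the unregistered driver. A toy version of that reconcile loop (illustrative; the real reconciler tracks far more state, and all names below are invented):

```go
package main

import "fmt"

// volumeKey pairs a volume with the pod that should (or should not) have it.
type volumeKey struct{ volume, podUID string }

// reconcile compares desired state with actual state: anything mounted but
// no longer desired is unmounted, anything desired but not yet mounted is
// mounted. Failed operations simply stay pending for the next pass.
func reconcile(desired, actual map[volumeKey]bool, mount, unmount func(volumeKey) error) {
	for k := range actual {
		if !desired[k] {
			if err := unmount(k); err != nil {
				fmt.Println("UnmountVolume failed:", err) // retried next pass
			}
		}
	}
	for k := range desired {
		if !actual[k] {
			if err := mount(k); err != nil {
				fmt.Println("MountVolume failed:", err) // retried next pass
			}
		}
	}
}

func main() {
	pvc := "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8"
	desired := map[volumeKey]bool{{pvc, "d3d55c34"}: true} // new registry pod wants it
	actual := map[volumeKey]bool{{pvc, "8f668bae"}: true}  // old pod still holds it
	notRegistered := fmt.Errorf("driver not found in the list of registered CSI drivers")

	// Both directions fail while the CSI driver is unregistered, which is
	// why the log shows the mount/unmount pair repeating every 500ms.
	reconcile(desired, actual,
		func(volumeKey) error { return notRegistered },
		func(volumeKey) error { return notRegistered })
}
```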
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.144901 4799 generic.go:334] "Generic (PLEG): container finished" podID="f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd" containerID="f6c530ccf78f5daa71963fc31500a8032626232194d74a2ea9e1cf11e9f93a6d" exitCode=0 Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.146031 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" event={"ID":"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd","Type":"ContainerDied","Data":"f6c530ccf78f5daa71963fc31500a8032626232194d74a2ea9e1cf11e9f93a6d"} Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.148221 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" event={"ID":"c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e","Type":"ContainerStarted","Data":"fac3996a79f89ff255a922d7e437650db14409ec0a92d07448049d99574b9d27"} Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.148946 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.150737 4799 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-f8vdn container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.150781 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" podUID="c76bfd6c-4db1-4bcf-8641-9bfc0edffe3e" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.165815 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc" event={"ID":"240455aa-026d-4291-a205-1451b6e0e397","Type":"ContainerStarted","Data":"5c90090f066c8b40adecb625f4cd445f0dbcf5e3ef1b64bb6c181f4e1b683d40"} Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.167042 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc" Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.194606 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5" event={"ID":"38e1abaa-9da0-4924-a6b5-ee9617cf304d","Type":"ContainerStarted","Data":"f169192034617894c401925437f87812325642143b592897e031c5928336e2f5"} Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.227650 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:39 crc kubenswrapper[4799]: E0121 17:35:39.228666 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:39.728642051 +0000 UTC m=+166.354932074 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.238033 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zjkpx" podStartSLOduration=138.238011611 podStartE2EDuration="2m18.238011611s" podCreationTimestamp="2026-01-21 17:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:39.17504969 +0000 UTC m=+165.801339713" watchObservedRunningTime="2026-01-21 17:35:39.238011611 +0000 UTC m=+165.864301634" Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.238261 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s96kb" podStartSLOduration=137.238256879 podStartE2EDuration="2m17.238256879s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:39.236615398 +0000 UTC m=+165.862905421" watchObservedRunningTime="2026-01-21 17:35:39.238256879 +0000 UTC m=+165.864546902" Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.251325 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc" Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.270375 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt" event={"ID":"b0eabe68-5cf0-4bc2-8578-02f73622072b","Type":"ContainerStarted","Data":"a1844b05263fe3d7e9a0b71c3da501c146cfa1914624842287de8df0596f8501"} Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.271796 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt" Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.329181 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:39 crc kubenswrapper[4799]: E0121 17:35:39.332485 4799 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:39.832464706 +0000 UTC m=+166.458754729 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.335422 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.338528 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk" podStartSLOduration=137.338502543 podStartE2EDuration="2m17.338502543s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:39.321504997 +0000 UTC m=+165.947795050" watchObservedRunningTime="2026-01-21 17:35:39.338502543 +0000 UTC m=+165.964792586" Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.430897 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:39 crc kubenswrapper[4799]: E0121 17:35:39.431438 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:39.931418512 +0000 UTC m=+166.557708535 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.482274 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-m875t" podStartSLOduration=138.482251407 podStartE2EDuration="2m18.482251407s" podCreationTimestamp="2026-01-21 17:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:39.480782441 +0000 UTC m=+166.107072464" watchObservedRunningTime="2026-01-21 17:35:39.482251407 +0000 UTC m=+166.108541430" Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.537961 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-l9zvc" podStartSLOduration=137.537940912 podStartE2EDuration="2m17.537940912s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:39.53593581 +0000 UTC m=+166.162225843" watchObservedRunningTime="2026-01-21 17:35:39.537940912 +0000 UTC m=+166.164230935" Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.539967 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:39 crc kubenswrapper[4799]: E0121 17:35:39.540640 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:40.040621085 +0000 UTC m=+166.666911108 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.611524 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-rk6k5" podStartSLOduration=137.611472421 podStartE2EDuration="2m17.611472421s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:39.610616284 +0000 UTC m=+166.236906317" watchObservedRunningTime="2026-01-21 17:35:39.611472421 +0000 UTC m=+166.237762444" Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.645550 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:39 crc kubenswrapper[4799]: E0121 17:35:39.646106 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:40.146076773 +0000 UTC m=+166.772366806 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.646296 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:39 crc kubenswrapper[4799]: E0121 17:35:39.646827 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:40.146811835 +0000 UTC m=+166.773101858 (durationBeforeRetry 500ms). 
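[Annotation] The "m=+166.773101858"-style suffixes on the retry deadlines are not kubelet-specific: they are Go's standard time.Time string form when the value carries a monotonic clock reading (roughly, seconds since the process's monotonic baseline), which keeps these deadlines correct across wall-clock adjustments. A quick demonstration:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	deadline := time.Now().Add(500 * time.Millisecond) // like durationBeforeRetry

	// Printing a time that still carries a monotonic reading appends
	// "m=+<seconds>", exactly as in the kubelet log lines.
	fmt.Println(deadline)

	// Round(0) strips the monotonic reading, so the suffix disappears.
	fmt.Println(deadline.Round(0))
}
```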
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.697301 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" podStartSLOduration=137.697275459 podStartE2EDuration="2m17.697275459s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:39.696636049 +0000 UTC m=+166.322926092" watchObservedRunningTime="2026-01-21 17:35:39.697275459 +0000 UTC m=+166.323565482" Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.732260 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt" podStartSLOduration=137.732239202 podStartE2EDuration="2m17.732239202s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:39.728965131 +0000 UTC m=+166.355255174" watchObservedRunningTime="2026-01-21 17:35:39.732239202 +0000 UTC m=+166.358529235" Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.759715 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:39 crc kubenswrapper[4799]: E0121 17:35:39.760674 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:40.260646152 +0000 UTC m=+166.886936175 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.977289 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.982469 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:39 crc kubenswrapper[4799]: E0121 17:35:39.983038 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:40.483020352 +0000 UTC m=+167.109310375 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.986501 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:35:39 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Jan 21 17:35:39 crc kubenswrapper[4799]: [+]process-running ok Jan 21 17:35:39 crc kubenswrapper[4799]: healthz check failed Jan 21 17:35:39 crc kubenswrapper[4799]: I0121 17:35:39.986914 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:35:40 crc kubenswrapper[4799]: I0121 17:35:40.102869 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:40 crc kubenswrapper[4799]: E0121 17:35:40.103364 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:40.60333888 +0000 UTC m=+167.229628903 (durationBeforeRetry 500ms). 
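[Annotation] The router's startup probe output above shows the usual Kubernetes healthz convention: each named check is reported as "[+]name ok" or "[-]name failed: reason withheld", and the endpoint returns HTTP 500 until every check passes, which the kubelet then records as "HTTP probe failed with statuscode: 500". A minimal handler in that style (a sketch of the convention, not the router's actual code; `healthz` and the check names are taken from the log for illustration):

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

// healthz aggregates named checks k8s-style: every check is listed with
// [+]/[-], and a single failure makes the whole response a 500, which is
// what the startup probe observes.
func healthz(checks map[string]func() error) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		body, failed := "", false
		for name, check := range checks {
			if err := check(); err != nil {
				failed = true
				body += fmt.Sprintf("[-]%s failed: reason withheld\n", name)
			} else {
				body += fmt.Sprintf("[+]%s ok\n", name)
			}
		}
		if failed {
			w.WriteHeader(http.StatusInternalServerError)
			fmt.Fprint(w, body, "healthz check failed\n")
			return
		}
		fmt.Fprint(w, body, "ok\n")
	}
}

func main() {
	h := healthz(map[string]func() error{
		"backend-http":    func() error { return fmt.Errorf("not ready") },
		"has-synced":      func() error { return fmt.Errorf("not ready") },
		"process-running": func() error { return nil },
	})
	rec := httptest.NewRecorder()
	h(rec, httptest.NewRequest("GET", "/healthz", nil))
	fmt.Println(rec.Code) // 500 while any check fails
	fmt.Print(rec.Body.String())
}
```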
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:40 crc kubenswrapper[4799]: I0121 17:35:40.213173 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:40 crc kubenswrapper[4799]: E0121 17:35:40.214537 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:40.714500924 +0000 UTC m=+167.340790947 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:40 crc kubenswrapper[4799]: I0121 17:35:40.356464 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:40 crc kubenswrapper[4799]: E0121 17:35:40.356967 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:40.856938267 +0000 UTC m=+167.483228290 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:40 crc kubenswrapper[4799]: I0121 17:35:40.386974 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr" event={"ID":"afc63db8-935e-43c5-952b-593f1b1e3350","Type":"ContainerStarted","Data":"fc49c2c0f1e5e82b21e6876d91d14b8c85e83b9254c6013359646e84edc166f2"} Jan 21 17:35:40 crc kubenswrapper[4799]: I0121 17:35:40.411954 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw" event={"ID":"9e82402a-bf1e-418b-9ec3-7723300db21b","Type":"ContainerStarted","Data":"c6c8d1f47edb7357f362b77647a7ddcd02472436da632cc70d23896727c2ccd8"} Jan 21 17:35:40 crc kubenswrapper[4799]: I0121 17:35:40.461222 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:40 crc kubenswrapper[4799]: E0121 17:35:40.461703 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:40.961683692 +0000 UTC m=+167.587973725 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:40 crc kubenswrapper[4799]: I0121 17:35:40.497790 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf" event={"ID":"64900be6-8be4-4cd4-8b14-68dfab26a71c","Type":"ContainerStarted","Data":"ca6de8a039f83499e9e3442f6401a6e074103476f00e976e4b547aa07ebdfd5d"} Jan 21 17:35:40 crc kubenswrapper[4799]: I0121 17:35:40.502879 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" event={"ID":"1c11c6a2-f364-43c0-8bbc-a0bb360795e1","Type":"ContainerStarted","Data":"11f8180ceb786eec36a7952ab9f9ef384e0ef9ac220009ad0def36bef19b8b76"} Jan 21 17:35:40 crc kubenswrapper[4799]: I0121 17:35:40.565219 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:40 crc kubenswrapper[4799]: E0121 17:35:40.565862 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:41.065841699 +0000 UTC m=+167.692131722 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:40 crc kubenswrapper[4799]: I0121 17:35:40.660511 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz" event={"ID":"9ae40d9a-9494-4907-8b3b-7d2dbff784fe","Type":"ContainerStarted","Data":"3e0355b64e7cba3ecd80d452e0746ddf6cf07257b3e4f06e48b7ef5756469425"} Jan 21 17:35:40 crc kubenswrapper[4799]: I0121 17:35:40.660615 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz" event={"ID":"9ae40d9a-9494-4907-8b3b-7d2dbff784fe","Type":"ContainerStarted","Data":"d45bf25be996d24d7654d6bf0889723bd28ddea42e34d266255e091b68b0537e"} Jan 21 17:35:40 crc kubenswrapper[4799]: I0121 17:35:40.669923 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:40 crc kubenswrapper[4799]: E0121 17:35:40.670545 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:41.170509552 +0000 UTC m=+167.796799575 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:40 crc kubenswrapper[4799]: I0121 17:35:40.696465 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" event={"ID":"acba609b-3b15-4514-9237-0d7b4faa356a","Type":"ContainerStarted","Data":"0dacddccdd1c21db379c1f4d7bb63731ac5d5dffc0887485561404425879cf15"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.011757 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:41 crc kubenswrapper[4799]: E0121 17:35:41.012324 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-21 17:35:41.51227631 +0000 UTC m=+168.138566333 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.012531 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:41 crc kubenswrapper[4799]: E0121 17:35:41.014468 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:41.514452228 +0000 UTC m=+168.140742251 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.084097 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4" event={"ID":"9f43309b-1eef-471f-8359-d7a35b677818","Type":"ContainerStarted","Data":"482676bfd756b42dcc947a4dfaa3723da19d9b8ec1ef42f41113e5ff08c3b3ec"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.084475 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:35:41 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Jan 21 17:35:41 crc kubenswrapper[4799]: [+]process-running ok Jan 21 17:35:41 crc kubenswrapper[4799]: healthz check failed Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.084559 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.091670 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-l8wnp" event={"ID":"17204b6d-9470-46fc-996a-5aab9eaef223","Type":"ContainerStarted","Data":"954d476aa82813527fca655df7a90e853a5f3b3924e004b55802961bb5403f33"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.094504 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" 
event={"ID":"1f9e0e11-7d5c-496f-bf3f-c78624ce6083","Type":"ContainerStarted","Data":"275a225ab9c3f5ec2e82671fb5a993e3e29c9030a44313ffbd24cd4309c80d51"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.098464 4799 generic.go:334] "Generic (PLEG): container finished" podID="8ee870b9-12a2-466c-a4a2-697c9d8c9918" containerID="32496d89d9a322711e1b592562250924427fb86cb893ad254b00f42145b870f6" exitCode=0 Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.098538 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" event={"ID":"8ee870b9-12a2-466c-a4a2-697c9d8c9918","Type":"ContainerDied","Data":"32496d89d9a322711e1b592562250924427fb86cb893ad254b00f42145b870f6"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.119954 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:41 crc kubenswrapper[4799]: E0121 17:35:41.120418 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:41.62038748 +0000 UTC m=+168.246677503 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.120540 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:41 crc kubenswrapper[4799]: E0121 17:35:41.121044 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:41.621017209 +0000 UTC m=+168.247307232 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.121758 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-46bpg" event={"ID":"740090e7-79a4-4e3c-b77d-7969f1d327d6","Type":"ContainerStarted","Data":"a8fe462fb2b53598cf961a6c7ef93f74c64b59df137a687d27214d390a77480f"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.121838 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-46bpg" event={"ID":"740090e7-79a4-4e3c-b77d-7969f1d327d6","Type":"ContainerStarted","Data":"154b728ba03e852913a703d1fab46a05b9e3c488006e5c0d27913aee233aaeb0"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.143873 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tdgbz" podStartSLOduration=139.143825496 podStartE2EDuration="2m19.143825496s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:41.010479194 +0000 UTC m=+167.636769217" watchObservedRunningTime="2026-01-21 17:35:41.143825496 +0000 UTC m=+167.770115519" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.177314 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-m875t" event={"ID":"46c59bb9-7544-496f-a38c-1054b3b95ae8","Type":"ContainerStarted","Data":"e82eaa84473c7572181c58c26950bd989f5b9bc69c109efec3dbd087af1a2d79"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.190115 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" event={"ID":"06fabbfb-ca52-4980-9478-5fbe09bca884","Type":"ContainerStarted","Data":"cc6f950e915aa62fac121f5f4fc749c772fad01d63ed2ee3bb06a4067cc35f46"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.192308 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-b6hnk" event={"ID":"cb895e61-5ea9-45d5-8145-1c82cb8da7bd","Type":"ContainerStarted","Data":"e043f5093e797ffa5420ca4a2176eb70475a751d910caf88d63c8f8b7eebe5c8"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.215609 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qs4j8" event={"ID":"07c163b1-b21e-4905-944b-ea8f34437277","Type":"ContainerStarted","Data":"2d0472f5f9ea9e0e94445b82cb97169f9616fef1615c77c98d2aff17b45e6589"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.222764 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:41 crc kubenswrapper[4799]: E0121 17:35:41.223834 4799 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:41.723812084 +0000 UTC m=+168.350102097 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.225245 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-5mmz6" event={"ID":"0d624414-fb5b-4553-a695-f2f233248e13","Type":"ContainerStarted","Data":"d1885031eb1985ea6739f1057071448a9d4f254f95df5e51fb6fc69ffd4f9023"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.227943 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-sm66d" event={"ID":"97d657de-2fc1-4ed1-b0a8-2b239049c70d","Type":"ContainerStarted","Data":"12e07122d33f16ebe488e59a51d331704c2640ae5fe0210632983885093d415b"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.245115 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24jlh" event={"ID":"14f1d8ff-d287-44f6-8427-2cc844cab8d1","Type":"ContainerStarted","Data":"f649e6efd5b5e45382893b60e363aaa49b766e32503fbe6d1f5c1a2294b85d07"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.249159 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-chqwl" event={"ID":"45703959-9502-44ab-a19e-19d702259346","Type":"ContainerStarted","Data":"e0768644febf5a42daca75ef5d024316564d7e7620b563c0d30fdc723a9bb1d4"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.251311 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" event={"ID":"bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242","Type":"ContainerStarted","Data":"4cde527e239a1ca771b589b8d83fb0adb861052299e7d9e65acc8f58ffe92063"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.251349 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" event={"ID":"bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242","Type":"ContainerStarted","Data":"9519f0cc8cb8cafce7b55cab076a11f932680f6524f44cd55648053714df06f3"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.295377 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" podStartSLOduration=139.29534642 podStartE2EDuration="2m19.29534642s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:41.268599461 +0000 UTC m=+167.894889494" watchObservedRunningTime="2026-01-21 17:35:41.29534642 +0000 UTC m=+167.921636463" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.306796 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-console/downloads-7954f5f757-6cjlt" event={"ID":"ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a","Type":"ContainerStarted","Data":"03d081da24b542b6efc72821727800ad7670f5ba496afc7d572f5d807e8a09ba"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.307148 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-6cjlt" event={"ID":"ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a","Type":"ContainerStarted","Data":"94a83372fb865d1357b375e1c1ef840271f1f7b2e9b838f520ddb655f571fd1f"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.309111 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-6cjlt" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.309425 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hk87s"] Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.311308 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hk87s" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.335444 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a7d46ef-dfda-4602-a004-c26ff4335788-catalog-content\") pod \"community-operators-hk87s\" (UID: \"2a7d46ef-dfda-4602-a004-c26ff4335788\") " pod="openshift-marketplace/community-operators-hk87s" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.335492 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a7d46ef-dfda-4602-a004-c26ff4335788-utilities\") pod \"community-operators-hk87s\" (UID: \"2a7d46ef-dfda-4602-a004-c26ff4335788\") " pod="openshift-marketplace/community-operators-hk87s" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.335531 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw4nh\" (UniqueName: \"kubernetes.io/projected/2a7d46ef-dfda-4602-a004-c26ff4335788-kube-api-access-tw4nh\") pod \"community-operators-hk87s\" (UID: \"2a7d46ef-dfda-4602-a004-c26ff4335788\") " pod="openshift-marketplace/community-operators-hk87s" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.335564 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:41 crc kubenswrapper[4799]: E0121 17:35:41.337366 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:41.837350572 +0000 UTC m=+168.463640595 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.371578 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.371677 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.373272 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.377581 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-chqwl" podStartSLOduration=138.377559997 podStartE2EDuration="2m18.377559997s" podCreationTimestamp="2026-01-21 17:33:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:41.372885632 +0000 UTC m=+167.999175655" watchObservedRunningTime="2026-01-21 17:35:41.377559997 +0000 UTC m=+168.003850020" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.377974 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hk87s"] Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.437082 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.437527 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a7d46ef-dfda-4602-a004-c26ff4335788-catalog-content\") pod \"community-operators-hk87s\" (UID: \"2a7d46ef-dfda-4602-a004-c26ff4335788\") " pod="openshift-marketplace/community-operators-hk87s" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.437551 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a7d46ef-dfda-4602-a004-c26ff4335788-utilities\") pod \"community-operators-hk87s\" (UID: \"2a7d46ef-dfda-4602-a004-c26ff4335788\") " pod="openshift-marketplace/community-operators-hk87s" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.437575 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw4nh\" (UniqueName: 
\"kubernetes.io/projected/2a7d46ef-dfda-4602-a004-c26ff4335788-kube-api-access-tw4nh\") pod \"community-operators-hk87s\" (UID: \"2a7d46ef-dfda-4602-a004-c26ff4335788\") " pod="openshift-marketplace/community-operators-hk87s" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.438945 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a7d46ef-dfda-4602-a004-c26ff4335788-catalog-content\") pod \"community-operators-hk87s\" (UID: \"2a7d46ef-dfda-4602-a004-c26ff4335788\") " pod="openshift-marketplace/community-operators-hk87s" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.439297 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a7d46ef-dfda-4602-a004-c26ff4335788-utilities\") pod \"community-operators-hk87s\" (UID: \"2a7d46ef-dfda-4602-a004-c26ff4335788\") " pod="openshift-marketplace/community-operators-hk87s" Jan 21 17:35:41 crc kubenswrapper[4799]: E0121 17:35:41.447277 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:41.947234576 +0000 UTC m=+168.573524619 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.536289 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw4nh\" (UniqueName: \"kubernetes.io/projected/2a7d46ef-dfda-4602-a004-c26ff4335788-kube-api-access-tw4nh\") pod \"community-operators-hk87s\" (UID: \"2a7d46ef-dfda-4602-a004-c26ff4335788\") " pod="openshift-marketplace/community-operators-hk87s" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.536528 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-46bpg" podStartSLOduration=138.536499512 podStartE2EDuration="2m18.536499512s" podCreationTimestamp="2026-01-21 17:33:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:41.48802903 +0000 UTC m=+168.114319063" watchObservedRunningTime="2026-01-21 17:35:41.536499512 +0000 UTC m=+168.162789535" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.538590 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8qkfv"] Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.837723 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8qkfv" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.843021 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hk87s" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.845817 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:35:41 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Jan 21 17:35:41 crc kubenswrapper[4799]: [+]process-running ok Jan 21 17:35:41 crc kubenswrapper[4799]: healthz check failed Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.845899 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.849581 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.857848 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8qkfv"] Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.868741 4799 generic.go:334] "Generic (PLEG): container finished" podID="88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9" containerID="ca313b136e3078ba84602c0e8c1f66eb4ce7f6ab50fc3d19196b53c6eb95ec55" exitCode=0 Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.868912 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" event={"ID":"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9","Type":"ContainerDied","Data":"ca313b136e3078ba84602c0e8c1f66eb4ce7f6ab50fc3d19196b53c6eb95ec55"} Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.883692 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb30842a-4bc0-4d3d-aa45-ff611e019759-utilities\") pod \"certified-operators-8qkfv\" (UID: \"cb30842a-4bc0-4d3d-aa45-ff611e019759\") " pod="openshift-marketplace/certified-operators-8qkfv" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.884183 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:41 crc kubenswrapper[4799]: E0121 17:35:41.884753 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:42.384732511 +0000 UTC m=+169.011022534 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.906641 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zk5bn"] Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.917236 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zk5bn" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.946870 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-6cjlt" podStartSLOduration=139.946843415 podStartE2EDuration="2m19.946843415s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:41.915444962 +0000 UTC m=+168.541734985" watchObservedRunningTime="2026-01-21 17:35:41.946843415 +0000 UTC m=+168.573133438" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.949167 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2z8fw"] Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.950231 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2z8fw" Jan 21 17:35:41 crc kubenswrapper[4799]: I0121 17:35:41.969381 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zk5bn"] Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:41.988598 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:41.989179 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb30842a-4bc0-4d3d-aa45-ff611e019759-catalog-content\") pod \"certified-operators-8qkfv\" (UID: \"cb30842a-4bc0-4d3d-aa45-ff611e019759\") " pod="openshift-marketplace/certified-operators-8qkfv" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:41.989243 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jgws\" (UniqueName: \"kubernetes.io/projected/cb30842a-4bc0-4d3d-aa45-ff611e019759-kube-api-access-7jgws\") pod \"certified-operators-8qkfv\" (UID: \"cb30842a-4bc0-4d3d-aa45-ff611e019759\") " pod="openshift-marketplace/certified-operators-8qkfv" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:41.989272 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb30842a-4bc0-4d3d-aa45-ff611e019759-utilities\") pod \"certified-operators-8qkfv\" (UID: \"cb30842a-4bc0-4d3d-aa45-ff611e019759\") " 
pod="openshift-marketplace/certified-operators-8qkfv" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:41.990055 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb30842a-4bc0-4d3d-aa45-ff611e019759-utilities\") pod \"certified-operators-8qkfv\" (UID: \"cb30842a-4bc0-4d3d-aa45-ff611e019759\") " pod="openshift-marketplace/certified-operators-8qkfv" Jan 21 17:35:42 crc kubenswrapper[4799]: E0121 17:35:41.990155 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:42.490106605 +0000 UTC m=+169.116396628 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.114390 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2145d2a2-e101-44cb-b0c4-4161fbb910f8-catalog-content\") pod \"certified-operators-zk5bn\" (UID: \"2145d2a2-e101-44cb-b0c4-4161fbb910f8\") " pod="openshift-marketplace/certified-operators-zk5bn" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.114440 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2145d2a2-e101-44cb-b0c4-4161fbb910f8-utilities\") pod \"certified-operators-zk5bn\" (UID: \"2145d2a2-e101-44cb-b0c4-4161fbb910f8\") " pod="openshift-marketplace/certified-operators-zk5bn" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.114471 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ktbc\" (UniqueName: \"kubernetes.io/projected/2145d2a2-e101-44cb-b0c4-4161fbb910f8-kube-api-access-5ktbc\") pod \"certified-operators-zk5bn\" (UID: \"2145d2a2-e101-44cb-b0c4-4161fbb910f8\") " pod="openshift-marketplace/certified-operators-zk5bn" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.114495 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9c6v\" (UniqueName: \"kubernetes.io/projected/3e7169e9-ed59-4259-bc63-a1079a9412c0-kube-api-access-v9c6v\") pod \"community-operators-2z8fw\" (UID: \"3e7169e9-ed59-4259-bc63-a1079a9412c0\") " pod="openshift-marketplace/community-operators-2z8fw" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.114519 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb30842a-4bc0-4d3d-aa45-ff611e019759-catalog-content\") pod \"certified-operators-8qkfv\" (UID: \"cb30842a-4bc0-4d3d-aa45-ff611e019759\") " pod="openshift-marketplace/certified-operators-8qkfv" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.114548 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jgws\" (UniqueName: 
\"kubernetes.io/projected/cb30842a-4bc0-4d3d-aa45-ff611e019759-kube-api-access-7jgws\") pod \"certified-operators-8qkfv\" (UID: \"cb30842a-4bc0-4d3d-aa45-ff611e019759\") " pod="openshift-marketplace/certified-operators-8qkfv" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.114584 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.114610 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e7169e9-ed59-4259-bc63-a1079a9412c0-utilities\") pod \"community-operators-2z8fw\" (UID: \"3e7169e9-ed59-4259-bc63-a1079a9412c0\") " pod="openshift-marketplace/community-operators-2z8fw" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.114636 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e7169e9-ed59-4259-bc63-a1079a9412c0-catalog-content\") pod \"community-operators-2z8fw\" (UID: \"3e7169e9-ed59-4259-bc63-a1079a9412c0\") " pod="openshift-marketplace/community-operators-2z8fw" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.114626 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-t47vq" event={"ID":"6d2c8c63-3efc-4ace-9715-0c04fb63a94c","Type":"ContainerStarted","Data":"db472203895a39d7b3e2eb3e3a1a97f357a3297ed3594aa9218521dec1f263ce"} Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.114724 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-t47vq" event={"ID":"6d2c8c63-3efc-4ace-9715-0c04fb63a94c","Type":"ContainerStarted","Data":"595c9f596293b5b190b952d4da448b92b0c6b34a48c8773fcd8c5263d919146e"} Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.115072 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb30842a-4bc0-4d3d-aa45-ff611e019759-catalog-content\") pod \"certified-operators-8qkfv\" (UID: \"cb30842a-4bc0-4d3d-aa45-ff611e019759\") " pod="openshift-marketplace/certified-operators-8qkfv" Jan 21 17:35:42 crc kubenswrapper[4799]: E0121 17:35:42.115957 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:42.615932364 +0000 UTC m=+169.242222377 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.115967 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2z8fw"] Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.128454 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-gxpvf" event={"ID":"d7f42c73-0135-4720-b94a-a903f5971266","Type":"ContainerStarted","Data":"eb437c358eecd22d533deaecdffa3a5adb29732a142c7cd2ca8f094b79e9fdea"} Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.165061 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-j254q" event={"ID":"fddb0541-77a5-4db7-8d2a-0b8e94488823","Type":"ContainerStarted","Data":"c1026907818a1f961a2f759c6a9693411677eea65f0b2fb2ac7146ee14333dfa"} Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.194524 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" event={"ID":"a3b37351-15c4-4cf3-8af5-1486009713a6","Type":"ContainerStarted","Data":"e378c5b22dc17361743d35a0f20e8436953665edb8753a510ee331a0c5e635d3"} Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.194581 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" event={"ID":"a3b37351-15c4-4cf3-8af5-1486009713a6","Type":"ContainerStarted","Data":"98c37469457a9055728bf72367c40351ba1ced75d4dbae4b206eae3b17bd1a88"} Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.194974 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.219660 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.219844 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ktbc\" (UniqueName: \"kubernetes.io/projected/2145d2a2-e101-44cb-b0c4-4161fbb910f8-kube-api-access-5ktbc\") pod \"certified-operators-zk5bn\" (UID: \"2145d2a2-e101-44cb-b0c4-4161fbb910f8\") " pod="openshift-marketplace/certified-operators-zk5bn" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.219883 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9c6v\" (UniqueName: \"kubernetes.io/projected/3e7169e9-ed59-4259-bc63-a1079a9412c0-kube-api-access-v9c6v\") pod \"community-operators-2z8fw\" (UID: \"3e7169e9-ed59-4259-bc63-a1079a9412c0\") " pod="openshift-marketplace/community-operators-2z8fw" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.219969 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/3e7169e9-ed59-4259-bc63-a1079a9412c0-utilities\") pod \"community-operators-2z8fw\" (UID: \"3e7169e9-ed59-4259-bc63-a1079a9412c0\") " pod="openshift-marketplace/community-operators-2z8fw" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.219996 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e7169e9-ed59-4259-bc63-a1079a9412c0-catalog-content\") pod \"community-operators-2z8fw\" (UID: \"3e7169e9-ed59-4259-bc63-a1079a9412c0\") " pod="openshift-marketplace/community-operators-2z8fw" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.220038 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2145d2a2-e101-44cb-b0c4-4161fbb910f8-catalog-content\") pod \"certified-operators-zk5bn\" (UID: \"2145d2a2-e101-44cb-b0c4-4161fbb910f8\") " pod="openshift-marketplace/certified-operators-zk5bn" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.220058 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2145d2a2-e101-44cb-b0c4-4161fbb910f8-utilities\") pod \"certified-operators-zk5bn\" (UID: \"2145d2a2-e101-44cb-b0c4-4161fbb910f8\") " pod="openshift-marketplace/certified-operators-zk5bn" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.220526 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2145d2a2-e101-44cb-b0c4-4161fbb910f8-utilities\") pod \"certified-operators-zk5bn\" (UID: \"2145d2a2-e101-44cb-b0c4-4161fbb910f8\") " pod="openshift-marketplace/certified-operators-zk5bn" Jan 21 17:35:42 crc kubenswrapper[4799]: E0121 17:35:42.221185 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:42.721165034 +0000 UTC m=+169.347455057 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.222090 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e7169e9-ed59-4259-bc63-a1079a9412c0-utilities\") pod \"community-operators-2z8fw\" (UID: \"3e7169e9-ed59-4259-bc63-a1079a9412c0\") " pod="openshift-marketplace/community-operators-2z8fw" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.227575 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e7169e9-ed59-4259-bc63-a1079a9412c0-catalog-content\") pod \"community-operators-2z8fw\" (UID: \"3e7169e9-ed59-4259-bc63-a1079a9412c0\") " pod="openshift-marketplace/community-operators-2z8fw" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.227887 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2145d2a2-e101-44cb-b0c4-4161fbb910f8-catalog-content\") pod \"certified-operators-zk5bn\" (UID: \"2145d2a2-e101-44cb-b0c4-4161fbb910f8\") " pod="openshift-marketplace/certified-operators-zk5bn" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.233154 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jgws\" (UniqueName: \"kubernetes.io/projected/cb30842a-4bc0-4d3d-aa45-ff611e019759-kube-api-access-7jgws\") pod \"certified-operators-8qkfv\" (UID: \"cb30842a-4bc0-4d3d-aa45-ff611e019759\") " pod="openshift-marketplace/certified-operators-8qkfv" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.313641 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8qkfv" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.324369 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:42 crc kubenswrapper[4799]: E0121 17:35:42.326728 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:42.826694153 +0000 UTC m=+169.452984176 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.401756 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" event={"ID":"d06abe7d-735c-46b1-b98a-f7ef020fe863","Type":"ContainerStarted","Data":"aad5b442667862585fc8652898487927970ec0109ab1ef2aa2e8bcb6aa8700a3"} Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.401837 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.415860 4799 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-96sxw container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.26:6443/healthz\": dial tcp 10.217.0.26:6443: connect: connection refused" start-of-body= Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.416359 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" podUID="d06abe7d-735c-46b1-b98a-f7ef020fe863" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.26:6443/healthz\": dial tcp 10.217.0.26:6443: connect: connection refused" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.422072 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.432852 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:42 crc kubenswrapper[4799]: E0121 17:35:42.435510 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:42.935487564 +0000 UTC m=+169.561777587 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.439376 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-f8vdn" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.440326 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ktbc\" (UniqueName: \"kubernetes.io/projected/2145d2a2-e101-44cb-b0c4-4161fbb910f8-kube-api-access-5ktbc\") pod \"certified-operators-zk5bn\" (UID: \"2145d2a2-e101-44cb-b0c4-4161fbb910f8\") " pod="openshift-marketplace/certified-operators-zk5bn" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.532123 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9c6v\" (UniqueName: \"kubernetes.io/projected/3e7169e9-ed59-4259-bc63-a1079a9412c0-kube-api-access-v9c6v\") pod \"community-operators-2z8fw\" (UID: \"3e7169e9-ed59-4259-bc63-a1079a9412c0\") " pod="openshift-marketplace/community-operators-2z8fw" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.540758 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-mr95r" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.543468 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:42 crc kubenswrapper[4799]: E0121 17:35:42.546654 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:43.046638638 +0000 UTC m=+169.672928661 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.568758 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2z8fw" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.588961 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-t47vq" podStartSLOduration=11.588933278 podStartE2EDuration="11.588933278s" podCreationTimestamp="2026-01-21 17:35:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:42.464573885 +0000 UTC m=+169.090863918" watchObservedRunningTime="2026-01-21 17:35:42.588933278 +0000 UTC m=+169.215223301" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.885939 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zk5bn" Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.887168 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:42 crc kubenswrapper[4799]: E0121 17:35:42.893429 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:43.39338104 +0000 UTC m=+170.019671053 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.893666 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:42 crc kubenswrapper[4799]: E0121 17:35:42.894471 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:43.394463623 +0000 UTC m=+170.020753646 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.896194 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:35:42 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Jan 21 17:35:42 crc kubenswrapper[4799]: [+]process-running ok Jan 21 17:35:42 crc kubenswrapper[4799]: healthz check failed Jan 21 17:35:42 crc kubenswrapper[4799]: I0121 17:35:42.896237 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.135516 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:43 crc kubenswrapper[4799]: E0121 17:35:43.137952 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:43.637900035 +0000 UTC m=+170.264190058 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.204368 4799 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-56k4p container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.204471 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" podUID="a3b37351-15c4-4cf3-8af5-1486009713a6" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.18:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.241212 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:43 crc kubenswrapper[4799]: E0121 17:35:43.241657 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:43.741637749 +0000 UTC m=+170.367927772 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.277829 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" podStartSLOduration=141.2778066 podStartE2EDuration="2m21.2778066s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:42.999094745 +0000 UTC m=+169.625384768" watchObservedRunningTime="2026-01-21 17:35:43.2778066 +0000 UTC m=+169.904096623" Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.289771 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dbbcd"] Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.291440 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dbbcd" Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.362443 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.363340 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-utilities\") pod \"redhat-marketplace-dbbcd\" (UID: \"ad7b0f43-cc68-4c74-967f-bc61107e6d0f\") " pod="openshift-marketplace/redhat-marketplace-dbbcd" Jan 21 17:35:43 crc kubenswrapper[4799]: E0121 17:35:43.363365 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:43.86333828 +0000 UTC m=+170.489628303 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.363476 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.363514 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-catalog-content\") pod \"redhat-marketplace-dbbcd\" (UID: \"ad7b0f43-cc68-4c74-967f-bc61107e6d0f\") " pod="openshift-marketplace/redhat-marketplace-dbbcd" Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.363572 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfnzp\" (UniqueName: \"kubernetes.io/projected/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-kube-api-access-gfnzp\") pod \"redhat-marketplace-dbbcd\" (UID: \"ad7b0f43-cc68-4c74-967f-bc61107e6d0f\") " pod="openshift-marketplace/redhat-marketplace-dbbcd" Jan 21 17:35:43 crc kubenswrapper[4799]: E0121 17:35:43.364619 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:43.864429394 +0000 UTC m=+170.490719417 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.409286 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.452424 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw" event={"ID":"9e82402a-bf1e-418b-9ec3-7723300db21b","Type":"ContainerStarted","Data":"0cf76a27aa3c02482e1617a07e38ce83eb19ab44881e39186665de44ba8a0023"} Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.469196 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.469350 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-catalog-content\") pod \"redhat-marketplace-dbbcd\" (UID: \"ad7b0f43-cc68-4c74-967f-bc61107e6d0f\") " pod="openshift-marketplace/redhat-marketplace-dbbcd" Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.469382 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfnzp\" (UniqueName: \"kubernetes.io/projected/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-kube-api-access-gfnzp\") pod \"redhat-marketplace-dbbcd\" (UID: \"ad7b0f43-cc68-4c74-967f-bc61107e6d0f\") " pod="openshift-marketplace/redhat-marketplace-dbbcd" Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.469450 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-utilities\") pod \"redhat-marketplace-dbbcd\" (UID: \"ad7b0f43-cc68-4c74-967f-bc61107e6d0f\") " pod="openshift-marketplace/redhat-marketplace-dbbcd" Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.469809 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-utilities\") pod \"redhat-marketplace-dbbcd\" (UID: \"ad7b0f43-cc68-4c74-967f-bc61107e6d0f\") " pod="openshift-marketplace/redhat-marketplace-dbbcd" Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.469807 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dbbcd"] Jan 21 17:35:43 crc kubenswrapper[4799]: E0121 17:35:43.469897 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:43.969876921 +0000 UTC m=+170.596166944 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.470956 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-catalog-content\") pod \"redhat-marketplace-dbbcd\" (UID: \"ad7b0f43-cc68-4c74-967f-bc61107e6d0f\") " pod="openshift-marketplace/redhat-marketplace-dbbcd" Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.548712 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" event={"ID":"acba609b-3b15-4514-9237-0d7b4faa356a","Type":"ContainerStarted","Data":"86364deffae116a99592f837d772ae06cacf61173b2a4976d1e557bd2c4224e9"} Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.570594 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:43 crc kubenswrapper[4799]: E0121 17:35:43.573639 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:44.073616985 +0000 UTC m=+170.699907008 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.574991 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-266kh" event={"ID":"1f9e0e11-7d5c-496f-bf3f-c78624ce6083","Type":"ContainerStarted","Data":"4d4ca4e7a7713f3009faeebad4fad81e4fafc9710dd62a459d0a37f26878da99"}
Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.600945 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf" event={"ID":"64900be6-8be4-4cd4-8b14-68dfab26a71c","Type":"ContainerStarted","Data":"8580ed310247119a741cb37ec24efcf09adefe5dea53ebdb7814ce4d41619092"}
Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.602795 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" event={"ID":"06fabbfb-ca52-4980-9478-5fbe09bca884","Type":"ContainerStarted","Data":"014f524f4df5adbc43364e6d3e0479200d0e879553db408ed36892c3f2122c58"}
Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.603708 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-2wknp"
Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.614531 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-l8wnp" event={"ID":"17204b6d-9470-46fc-996a-5aab9eaef223","Type":"ContainerStarted","Data":"c57396b298c4255d63cc338ef6547157733474e7b9d8f6412632aaf7ff78f1e3"}
Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.622164 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4" event={"ID":"9f43309b-1eef-471f-8359-d7a35b677818","Type":"ContainerStarted","Data":"c20b3efb7aaf55958ad1959cf86f06ddd3ef381b5788c4be3b97aab972ebd6db"}
Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.622268 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4" event={"ID":"9f43309b-1eef-471f-8359-d7a35b677818","Type":"ContainerStarted","Data":"2b3f56d3871855f986f15ce3cae2ca90a6a873d9fdc283e0e7a29b8614b94e7b"}
Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.632699 4799 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-2wknp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body=
Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.632791 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" podUID="06fabbfb-ca52-4980-9478-5fbe09bca884" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused"
Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.685085 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:43 crc kubenswrapper[4799]: E0121 17:35:43.686962 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:44.186909485 +0000 UTC m=+170.813199508 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.801802 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:43 crc kubenswrapper[4799]: E0121 17:35:43.804006 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:44.303991072 +0000 UTC m=+170.930281095 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.673948 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" event={"ID":"88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9","Type":"ContainerStarted","Data":"3a17da7b034332abf4d0e888abe53383270bab3e65471459579f306fe3822679"}
Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.858495 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:35:43 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld
Jan 21 17:35:43 crc kubenswrapper[4799]: [+]process-running ok
Jan 21 17:35:43 crc kubenswrapper[4799]: healthz check failed
Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.858592 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.913194 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:43 crc kubenswrapper[4799]: E0121 17:35:43.914054 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:44.414029691 +0000 UTC m=+171.040319714 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:43 crc kubenswrapper[4799]: I0121 17:35:43.936556 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-sm66d" event={"ID":"97d657de-2fc1-4ed1-b0a8-2b239049c70d","Type":"ContainerStarted","Data":"aa5cde327611a5c4723c0e522b8b52276399d0d243743230a805add4e936dba0"}
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.003487 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xgvfc"]
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.005207 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xgvfc"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.022362 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:44 crc kubenswrapper[4799]: E0121 17:35:44.022839 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:44.522820671 +0000 UTC m=+171.149110694 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.041499 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" event={"ID":"8ee870b9-12a2-466c-a4a2-697c9d8c9918","Type":"ContainerStarted","Data":"4c996d0dd395da98c5b7d05cd34094309e42650e665903adbf5a20c5d120c781"}
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.125996 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.126246 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sps4\" (UniqueName: \"kubernetes.io/projected/9a09e09d-8207-4727-9c4e-cea051cb063a-kube-api-access-7sps4\") pod \"redhat-marketplace-xgvfc\" (UID: \"9a09e09d-8207-4727-9c4e-cea051cb063a\") " pod="openshift-marketplace/redhat-marketplace-xgvfc"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.126281 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a09e09d-8207-4727-9c4e-cea051cb063a-utilities\") pod \"redhat-marketplace-xgvfc\" (UID: \"9a09e09d-8207-4727-9c4e-cea051cb063a\") " pod="openshift-marketplace/redhat-marketplace-xgvfc"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.126313 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a09e09d-8207-4727-9c4e-cea051cb063a-catalog-content\") pod \"redhat-marketplace-xgvfc\" (UID: \"9a09e09d-8207-4727-9c4e-cea051cb063a\") " pod="openshift-marketplace/redhat-marketplace-xgvfc"
Jan 21 17:35:44 crc kubenswrapper[4799]: E0121 17:35:44.126536 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:44.626516044 +0000 UTC m=+171.252806057 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.178745 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfnzp\" (UniqueName: \"kubernetes.io/projected/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-kube-api-access-gfnzp\") pod \"redhat-marketplace-dbbcd\" (UID: \"ad7b0f43-cc68-4c74-967f-bc61107e6d0f\") " pod="openshift-marketplace/redhat-marketplace-dbbcd"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.186721 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" event={"ID":"bb4cc7ab-97dd-4ae6-b107-d9b89f4a7242","Type":"ContainerStarted","Data":"ec6ee16de3d0c14fc9c2aa8746c0ab97b0afc5a548b3295ecf44c529e1c0c541"}
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.192647 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" event={"ID":"1c11c6a2-f364-43c0-8bbc-a0bb360795e1","Type":"ContainerStarted","Data":"c9b55b1531799ad91164c75af29a5240ef94384277f86e67d51b8691f2d0f620"}
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.227465 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.227551 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sps4\" (UniqueName: \"kubernetes.io/projected/9a09e09d-8207-4727-9c4e-cea051cb063a-kube-api-access-7sps4\") pod \"redhat-marketplace-xgvfc\" (UID: \"9a09e09d-8207-4727-9c4e-cea051cb063a\") " pod="openshift-marketplace/redhat-marketplace-xgvfc"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.227573 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a09e09d-8207-4727-9c4e-cea051cb063a-utilities\") pod \"redhat-marketplace-xgvfc\" (UID: \"9a09e09d-8207-4727-9c4e-cea051cb063a\") " pod="openshift-marketplace/redhat-marketplace-xgvfc"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.227594 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a09e09d-8207-4727-9c4e-cea051cb063a-catalog-content\") pod \"redhat-marketplace-xgvfc\" (UID: \"9a09e09d-8207-4727-9c4e-cea051cb063a\") " pod="openshift-marketplace/redhat-marketplace-xgvfc"
Jan 21 17:35:44 crc kubenswrapper[4799]: E0121 17:35:44.228889 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:44.728874316 +0000 UTC m=+171.355164339 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.230012 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a09e09d-8207-4727-9c4e-cea051cb063a-utilities\") pod \"redhat-marketplace-xgvfc\" (UID: \"9a09e09d-8207-4727-9c4e-cea051cb063a\") " pod="openshift-marketplace/redhat-marketplace-xgvfc"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.241711 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a09e09d-8207-4727-9c4e-cea051cb063a-catalog-content\") pod \"redhat-marketplace-xgvfc\" (UID: \"9a09e09d-8207-4727-9c4e-cea051cb063a\") " pod="openshift-marketplace/redhat-marketplace-xgvfc"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.309486 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgvfc"]
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.309533 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qs4j8" event={"ID":"07c163b1-b21e-4905-944b-ea8f34437277","Type":"ContainerStarted","Data":"5d76937ecbb047581e401b074a86d0bdbeeeb80a503727a12046c75eebb8b09e"}
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.335438 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" event={"ID":"d06abe7d-735c-46b1-b98a-f7ef020fe863","Type":"ContainerStarted","Data":"1532c98d202b55ede78435175a95344e0f17e67da30f259ded513b15a7a6a143"}
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.335706 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.336269 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.336309 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Jan 21 17:35:44 crc kubenswrapper[4799]: E0121 17:35:44.336968 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:44.836944114 +0000 UTC m=+171.463234137 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:44.836944114 +0000 UTC m=+171.463234137 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.337744 4799 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-96sxw container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.26:6443/healthz\": dial tcp 10.217.0.26:6443: connect: connection refused" start-of-body= Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.337784 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" podUID="d06abe7d-735c-46b1-b98a-f7ef020fe863" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.26:6443/healthz\": dial tcp 10.217.0.26:6443: connect: connection refused" Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.369672 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dbbcd" Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.434840 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sps4\" (UniqueName: \"kubernetes.io/projected/9a09e09d-8207-4727-9c4e-cea051cb063a-kube-api-access-7sps4\") pod \"redhat-marketplace-xgvfc\" (UID: \"9a09e09d-8207-4727-9c4e-cea051cb063a\") " pod="openshift-marketplace/redhat-marketplace-xgvfc" Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.437618 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:44 crc kubenswrapper[4799]: E0121 17:35:44.438209 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:44.938190251 +0000 UTC m=+171.564480274 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.673720 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:44 crc kubenswrapper[4799]: E0121 17:35:44.675580 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:45.175533974 +0000 UTC m=+171.801823997 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.678346 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:44 crc kubenswrapper[4799]: E0121 17:35:44.680538 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:45.180519409 +0000 UTC m=+171.806809432 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.681762 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.682212 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.682245 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.683388 4799 patch_prober.go:28] interesting pod/console-f9d7485db-m875t container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.683431 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-m875t" podUID="46c59bb9-7544-496f-a38c-1054b3b95ae8" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.719652 4799 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-2wknp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.719710 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" podUID="06fabbfb-ca52-4980-9478-5fbe09bca884" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.720005 4799 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-2wknp container/marketplace-operator namespace/openshift-marketplace: Liveness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.720062 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" podUID="06fabbfb-ca52-4980-9478-5fbe09bca884" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.720549 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.721382 4799 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-8zkr4 
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.721491 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" podUID="88cf37ab-5ca9-4b48-8d2e-ca5109bd79f9" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.25:8443/livez\": dial tcp 10.217.0.25:8443: connect: connection refused"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.764581 4799 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-96sxw container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.26:6443/healthz\": dial tcp 10.217.0.26:6443: connect: connection refused" start-of-body=
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.764667 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" podUID="d06abe7d-735c-46b1-b98a-f7ef020fe863" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.26:6443/healthz\": dial tcp 10.217.0.26:6443: connect: connection refused"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.785190 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:44 crc kubenswrapper[4799]: E0121 17:35:44.785960 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:45.285932625 +0000 UTC m=+171.912222648 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.799536 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" podStartSLOduration=143.789633769 podStartE2EDuration="2m23.789633769s" podCreationTimestamp="2026-01-21 17:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:44.335080966 +0000 UTC m=+170.961370989" watchObservedRunningTime="2026-01-21 17:35:44.789633769 +0000 UTC m=+171.415923792"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.852269 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-q6bfd"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.856749 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rzgjt"]
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.867196 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rzgjt"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.880817 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.880902 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.880907 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.881001 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.890363 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zls6\" (UniqueName: \"kubernetes.io/projected/10d51c83-0754-4e1a-a39f-de83ea48bf7b-kube-api-access-4zls6\") pod \"redhat-operators-rzgjt\" (UID: \"10d51c83-0754-4e1a-a39f-de83ea48bf7b\") " pod="openshift-marketplace/redhat-operators-rzgjt"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.890419 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10d51c83-0754-4e1a-a39f-de83ea48bf7b-catalog-content\") pod \"redhat-operators-rzgjt\" (UID: \"10d51c83-0754-4e1a-a39f-de83ea48bf7b\") " pod="openshift-marketplace/redhat-operators-rzgjt"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.890477 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10d51c83-0754-4e1a-a39f-de83ea48bf7b-utilities\") pod \"redhat-operators-rzgjt\" (UID: \"10d51c83-0754-4e1a-a39f-de83ea48bf7b\") " pod="openshift-marketplace/redhat-operators-rzgjt"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.890504 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:44 crc kubenswrapper[4799]: E0121 17:35:44.890859 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:45.390843125 +0000 UTC m=+172.017133148 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.938213 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.982454 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vccfc"]
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.983872 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vccfc"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.991033 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.991367 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10d51c83-0754-4e1a-a39f-de83ea48bf7b-catalog-content\") pod \"redhat-operators-rzgjt\" (UID: \"10d51c83-0754-4e1a-a39f-de83ea48bf7b\") " pod="openshift-marketplace/redhat-operators-rzgjt"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.991468 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10d51c83-0754-4e1a-a39f-de83ea48bf7b-utilities\") pod \"redhat-operators-rzgjt\" (UID: \"10d51c83-0754-4e1a-a39f-de83ea48bf7b\") " pod="openshift-marketplace/redhat-operators-rzgjt"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.991533 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zls6\" (UniqueName: \"kubernetes.io/projected/10d51c83-0754-4e1a-a39f-de83ea48bf7b-kube-api-access-4zls6\") pod \"redhat-operators-rzgjt\" (UID: \"10d51c83-0754-4e1a-a39f-de83ea48bf7b\") " pod="openshift-marketplace/redhat-operators-rzgjt"
Jan 21 17:35:44 crc kubenswrapper[4799]: E0121 17:35:44.992590 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:45.492570647 +0000 UTC m=+172.118860670 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.993205 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10d51c83-0754-4e1a-a39f-de83ea48bf7b-catalog-content\") pod \"redhat-operators-rzgjt\" (UID: \"10d51c83-0754-4e1a-a39f-de83ea48bf7b\") " pod="openshift-marketplace/redhat-operators-rzgjt"
Jan 21 17:35:44 crc kubenswrapper[4799]: I0121 17:35:44.993454 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10d51c83-0754-4e1a-a39f-de83ea48bf7b-utilities\") pod \"redhat-operators-rzgjt\" (UID: \"10d51c83-0754-4e1a-a39f-de83ea48bf7b\") " pod="openshift-marketplace/redhat-operators-rzgjt"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.011969 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vccfc"]
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.018344 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rzgjt"]
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.096780 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfc335e9-4154-4713-a1b7-96f30bdab940-utilities\") pod \"redhat-operators-vccfc\" (UID: \"cfc335e9-4154-4713-a1b7-96f30bdab940\") " pod="openshift-marketplace/redhat-operators-vccfc"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.097328 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhd5x\" (UniqueName: \"kubernetes.io/projected/cfc335e9-4154-4713-a1b7-96f30bdab940-kube-api-access-rhd5x\") pod \"redhat-operators-vccfc\" (UID: \"cfc335e9-4154-4713-a1b7-96f30bdab940\") " pod="openshift-marketplace/redhat-operators-vccfc"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.097404 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.097456 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfc335e9-4154-4713-a1b7-96f30bdab940-catalog-content\") pod \"redhat-operators-vccfc\" (UID: \"cfc335e9-4154-4713-a1b7-96f30bdab940\") " pod="openshift-marketplace/redhat-operators-vccfc"
Jan 21 17:35:45 crc kubenswrapper[4799]: E0121 17:35:45.097901 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:45.597879619 +0000 UTC m=+172.224169642 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.140113 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:35:45 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld
Jan 21 17:35:45 crc kubenswrapper[4799]: [+]process-running ok
Jan 21 17:35:45 crc kubenswrapper[4799]: healthz check failed
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.140230 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.202011 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:45 crc kubenswrapper[4799]: E0121 17:35:45.202316 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:45.702273664 +0000 UTC m=+172.328563687 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.202388 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhd5x\" (UniqueName: \"kubernetes.io/projected/cfc335e9-4154-4713-a1b7-96f30bdab940-kube-api-access-rhd5x\") pod \"redhat-operators-vccfc\" (UID: \"cfc335e9-4154-4713-a1b7-96f30bdab940\") " pod="openshift-marketplace/redhat-operators-vccfc"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.202507 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.202557 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfc335e9-4154-4713-a1b7-96f30bdab940-catalog-content\") pod \"redhat-operators-vccfc\" (UID: \"cfc335e9-4154-4713-a1b7-96f30bdab940\") " pod="openshift-marketplace/redhat-operators-vccfc"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.202818 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfc335e9-4154-4713-a1b7-96f30bdab940-utilities\") pod \"redhat-operators-vccfc\" (UID: \"cfc335e9-4154-4713-a1b7-96f30bdab940\") " pod="openshift-marketplace/redhat-operators-vccfc"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.203656 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfc335e9-4154-4713-a1b7-96f30bdab940-utilities\") pod \"redhat-operators-vccfc\" (UID: \"cfc335e9-4154-4713-a1b7-96f30bdab940\") " pod="openshift-marketplace/redhat-operators-vccfc"
Jan 21 17:35:45 crc kubenswrapper[4799]: E0121 17:35:45.204061 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:45.704047209 +0000 UTC m=+172.330337232 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.204405 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfc335e9-4154-4713-a1b7-96f30bdab940-catalog-content\") pod \"redhat-operators-vccfc\" (UID: \"cfc335e9-4154-4713-a1b7-96f30bdab940\") " pod="openshift-marketplace/redhat-operators-vccfc"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.290917 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zls6\" (UniqueName: \"kubernetes.io/projected/10d51c83-0754-4e1a-a39f-de83ea48bf7b-kube-api-access-4zls6\") pod \"redhat-operators-rzgjt\" (UID: \"10d51c83-0754-4e1a-a39f-de83ea48bf7b\") " pod="openshift-marketplace/redhat-operators-rzgjt"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.307251 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:45 crc kubenswrapper[4799]: E0121 17:35:45.307818 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:45.807788853 +0000 UTC m=+172.434078876 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.333521 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xgvfc"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.356183 4799 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-56k4p container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.356258 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" podUID="a3b37351-15c4-4cf3-8af5-1486009713a6" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.18:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.405237 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" podStartSLOduration=143.405216411 podStartE2EDuration="2m23.405216411s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:45.315818342 +0000 UTC m=+171.942108385" watchObservedRunningTime="2026-01-21 17:35:45.405216411 +0000 UTC m=+172.031506434"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.405376 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-csxlf" podStartSLOduration=143.405371376 podStartE2EDuration="2m23.405371376s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:45.403472747 +0000 UTC m=+172.029762770" watchObservedRunningTime="2026-01-21 17:35:45.405371376 +0000 UTC m=+172.031661409"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.434888 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:45 crc kubenswrapper[4799]: E0121 17:35:45.435261 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:45.935247822 +0000 UTC m=+172.561537845 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.448218 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhd5x\" (UniqueName: \"kubernetes.io/projected/cfc335e9-4154-4713-a1b7-96f30bdab940-kube-api-access-rhd5x\") pod \"redhat-operators-vccfc\" (UID: \"cfc335e9-4154-4713-a1b7-96f30bdab940\") " pod="openshift-marketplace/redhat-operators-vccfc"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.546750 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:45 crc kubenswrapper[4799]: E0121 17:35:45.547172 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:46.047083856 +0000 UTC m=+172.673373879 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.547623 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:45 crc kubenswrapper[4799]: E0121 17:35:45.548174 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:46.04815706 +0000 UTC m=+172.674447083 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.564994 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-5mmz6" event={"ID":"0d624414-fb5b-4553-a695-f2f233248e13","Type":"ContainerStarted","Data":"e32efc21109a4e5d6f641c039680cfdef32cc1c97cebfc8bd2e2fa9af375ca52"}
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.594827 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" event={"ID":"1c11c6a2-f364-43c0-8bbc-a0bb360795e1","Type":"ContainerStarted","Data":"8c2c1a9a2fabbbdfd7b39969e9309c10363b40bb39a173ab8e32d27390278b21"}
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.597328 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr" event={"ID":"afc63db8-935e-43c5-952b-593f1b1e3350","Type":"ContainerStarted","Data":"7a23220f11deb84f3dbc806f916fb424f115051a62ac20c96fafea15481042ee"}
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.600177 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" event={"ID":"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd","Type":"ContainerStarted","Data":"02204326b60d8c84f2496054b306d2ffb58dacf02996e2c6bedded1984446521"}
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.604902 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rzgjt"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.650038 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4" podStartSLOduration=143.650008536 podStartE2EDuration="2m23.650008536s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:45.636121225 +0000 UTC m=+172.262411268" watchObservedRunningTime="2026-01-21 17:35:45.650008536 +0000 UTC m=+172.276298549"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.651437 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.658930 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs\") pod \"network-metrics-daemon-7q999\" (UID: \"7796adba-b973-44ee-b0c4-c0df544250e3\") " pod="openshift-multus/network-metrics-daemon-7q999"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.673666 4799 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-56k4p container/packageserver namespace/openshift-operator-lifecycle-manager: Liveness probe status=failure output="Get \"https://10.217.0.18:5443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.674254 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" podUID="a3b37351-15c4-4cf3-8af5-1486009713a6" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.18:5443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 21 17:35:45 crc kubenswrapper[4799]: E0121 17:35:45.677250 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:46.177218699 +0000 UTC m=+172.803508722 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.682512 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vccfc"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.719496 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-j254q" event={"ID":"fddb0541-77a5-4db7-8d2a-0b8e94488823","Type":"ContainerStarted","Data":"54743e6f9a0d5e9747a8ebaea4adef751af2d84d3387543e7eac948159bc1c86"}
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.773197 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:45 crc kubenswrapper[4799]: E0121 17:35:45.773679 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:46.273664457 +0000 UTC m=+172.899954480 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.817009 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tc5lz" podStartSLOduration=143.816986769 podStartE2EDuration="2m23.816986769s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:45.80539596 +0000 UTC m=+172.431685983" watchObservedRunningTime="2026-01-21 17:35:45.816986769 +0000 UTC m=+172.443276792"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.834651 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7796adba-b973-44ee-b0c4-c0df544250e3-metrics-certs\") pod \"network-metrics-daemon-7q999\" (UID: \"7796adba-b973-44ee-b0c4-c0df544250e3\") " pod="openshift-multus/network-metrics-daemon-7q999"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.853490 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:35:45 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld
Jan 21 17:35:45 crc kubenswrapper[4799]: [+]process-running ok
Jan 21 17:35:45 crc kubenswrapper[4799]: healthz check failed
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.853640 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.853094 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw" podStartSLOduration=144.853060466 podStartE2EDuration="2m24.853060466s" podCreationTimestamp="2026-01-21 17:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:45.843350676 +0000 UTC m=+172.469640719" watchObservedRunningTime="2026-01-21 17:35:45.853060466 +0000 UTC m=+172.479350479"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.861327 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qs4j8" event={"ID":"07c163b1-b21e-4905-944b-ea8f34437277","Type":"ContainerStarted","Data":"8884b8dad8332ad68bf634b4bfef7645173adac2009b72a01b55f365d7355c46"}
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.872883 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hk87s"]
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.874898 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.875118 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-l8wnp" event={"ID":"17204b6d-9470-46fc-996a-5aab9eaef223","Type":"ContainerStarted","Data":"22be55463a3561d225cb59fccdb53cf83e25aadf4d3026172bfd54db14141be7"}
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.875196 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zk5bn"]
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.883195 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.883304 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Jan 21 17:35:45 crc kubenswrapper[4799]: E0121 17:35:45.885836 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:46.382107526 +0000 UTC m=+173.008397549 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.888620 4799 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-2wknp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body=
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.888702 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" podUID="06fabbfb-ca52-4980-9478-5fbe09bca884" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.895162 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.911380 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" podStartSLOduration=144.911355442 podStartE2EDuration="2m24.911355442s" podCreationTimestamp="2026-01-21 17:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:45.910862617 +0000 UTC m=+172.537152640" watchObservedRunningTime="2026-01-21 17:35:45.911355442 +0000 UTC m=+172.537645465"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.944572 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2z8fw"]
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.950559 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-q5ndp" podStartSLOduration=143.950532986 podStartE2EDuration="2m23.950532986s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:45.949402401 +0000 UTC m=+172.575692604" watchObservedRunningTime="2026-01-21 17:35:45.950532986 +0000 UTC m=+172.576823019"
Jan 21 17:35:45 crc kubenswrapper[4799]: I0121 17:35:45.982658 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:45 crc kubenswrapper[4799]: E0121 17:35:45.991332 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed.
No retries permitted until 2026-01-21 17:35:46.491288249 +0000 UTC m=+173.117578262 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.008542 4799 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-vfk7d container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.008638 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" podUID="8ee870b9-12a2-466c-a4a2-697c9d8c9918" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.008844 4799 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-vfk7d container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.008927 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" podUID="8ee870b9-12a2-466c-a4a2-697c9d8c9918" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.018493 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f7nd4" podStartSLOduration=144.018465391 podStartE2EDuration="2m24.018465391s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:45.993738905 +0000 UTC m=+172.620028938" watchObservedRunningTime="2026-01-21 17:35:46.018465391 +0000 UTC m=+172.644755414" Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.018546 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-7q999" Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.026329 4799 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-vfk7d container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.026428 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d" podUID="8ee870b9-12a2-466c-a4a2-697c9d8c9918" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.098585 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:46 crc kubenswrapper[4799]: E0121 17:35:46.099054 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:46.599031207 +0000 UTC m=+173.225321230 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.120682 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qs4j8" podStartSLOduration=144.120660067 podStartE2EDuration="2m24.120660067s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:46.119620245 +0000 UTC m=+172.745910268" watchObservedRunningTime="2026-01-21 17:35:46.120660067 +0000 UTC m=+172.746950090" Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.212604 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:46 crc kubenswrapper[4799]: E0121 17:35:46.213579 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:46.713561515 +0000 UTC m=+173.339851538 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.284247 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56k4p" Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.284589 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8qkfv"] Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.327450 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:46 crc kubenswrapper[4799]: E0121 17:35:46.328263 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:46.828241428 +0000 UTC m=+173.454531451 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.430025 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:46 crc kubenswrapper[4799]: E0121 17:35:46.434964 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:46.934939653 +0000 UTC m=+173.561229676 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.572423 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:46 crc kubenswrapper[4799]: E0121 17:35:46.584300 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:47.084260969 +0000 UTC m=+173.710550992 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.616630 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vccfc"] Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.686382 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:46 crc kubenswrapper[4799]: E0121 17:35:46.686786 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:47.186770426 +0000 UTC m=+173.813060449 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.707840 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rzgjt"] Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.788228 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:46 crc kubenswrapper[4799]: E0121 17:35:46.788443 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:47.288414864 +0000 UTC m=+173.914704897 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.796893 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:46 crc kubenswrapper[4799]: E0121 17:35:46.797555 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:47.297536957 +0000 UTC m=+173.923826980 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.808756 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.831649 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-l8wnp" podStartSLOduration=144.831625963 podStartE2EDuration="2m24.831625963s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:46.829993813 +0000 UTC m=+173.456283856" watchObservedRunningTime="2026-01-21 17:35:46.831625963 +0000 UTC m=+173.457915976" Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.904121 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:46 crc kubenswrapper[4799]: E0121 17:35:46.904239 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:47.404214382 +0000 UTC m=+174.030504405 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.904385 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:46 crc kubenswrapper[4799]: E0121 17:35:46.906722 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:47.406697769 +0000 UTC m=+174.032987792 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.961656 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:35:46 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Jan 21 17:35:46 crc kubenswrapper[4799]: [+]process-running ok Jan 21 17:35:46 crc kubenswrapper[4799]: healthz check failed Jan 21 17:35:46 crc kubenswrapper[4799]: I0121 17:35:46.961757 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.112528 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:47 crc kubenswrapper[4799]: E0121 17:35:47.113112 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:47.613091294 +0000 UTC m=+174.239381317 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.371184 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:47 crc kubenswrapper[4799]: E0121 17:35:47.371591 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:47.871576602 +0000 UTC m=+174.497866625 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.473811 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:47 crc kubenswrapper[4799]: E0121 17:35:47.474247 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:47.974215782 +0000 UTC m=+174.600505805 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.474597 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:47 crc kubenswrapper[4799]: E0121 17:35:47.475190 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:47.97510807 +0000 UTC m=+174.601398093 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.569904 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" event={"ID":"f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd","Type":"ContainerStarted","Data":"6ff937c30a4e19588387939eba8856fbc3c1958fdab87a1fec541032704bf2ee"} Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.578051 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:47 crc kubenswrapper[4799]: E0121 17:35:47.579484 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:48.079454273 +0000 UTC m=+174.705744296 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.587638 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hk87s" event={"ID":"2a7d46ef-dfda-4602-a004-c26ff4335788","Type":"ContainerStarted","Data":"c58e2c5986ecd33cdec580f18688d5303c79da5b87b6317a778ef1d45b5297db"} Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.686802 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:47 crc kubenswrapper[4799]: E0121 17:35:47.687424 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:48.187399747 +0000 UTC m=+174.813689770 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.695221 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2z8fw" event={"ID":"3e7169e9-ed59-4259-bc63-a1079a9412c0","Type":"ContainerStarted","Data":"320277bc27fd1de47ca7e3e01b4bd6c6972eace8b6347da3bb90efe4c4416f32"} Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.695278 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2z8fw" event={"ID":"3e7169e9-ed59-4259-bc63-a1079a9412c0","Type":"ContainerStarted","Data":"08f2d23a460bf45492f7e4e10bfbca1c192a0f5dca16997bcb391235ead0a89c"} Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.699845 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.711943 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8qkfv" event={"ID":"cb30842a-4bc0-4d3d-aa45-ff611e019759","Type":"ContainerStarted","Data":"e49c154cff1de0755c12ed36122c3e77e41b4af40ce5eb02370385891bf40c96"} Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.728317 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-5mmz6" event={"ID":"0d624414-fb5b-4553-a695-f2f233248e13","Type":"ContainerStarted","Data":"e81fa6ff8def6e8e3a4342f2e5ec90645aa76f7ca168dce67f3a72ef805550b6"} Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.790032 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:47 crc kubenswrapper[4799]: E0121 17:35:47.790610 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:48.290591194 +0000 UTC m=+174.916881217 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.834685 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-sm66d" event={"ID":"97d657de-2fc1-4ed1-b0a8-2b239049c70d","Type":"ContainerStarted","Data":"9bb6f51ca684970099322352a1e49bee7cabdf33850d1ee37351300683bde5e5"} Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.836475 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-sm66d" Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.845625 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vccfc" event={"ID":"cfc335e9-4154-4713-a1b7-96f30bdab940","Type":"ContainerStarted","Data":"f6ccf8a5397af8494e8f7132dd8d718728e070b0cc081623749ed5ff986c4237"} Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.847102 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zk5bn" event={"ID":"2145d2a2-e101-44cb-b0c4-4161fbb910f8","Type":"ContainerStarted","Data":"360e8e916da6cf0414e65b875187ecaa9eb6fff4a48dc47ad89861d1a1ea582f"} Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.854808 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzgjt" event={"ID":"10d51c83-0754-4e1a-a39f-de83ea48bf7b","Type":"ContainerStarted","Data":"d40327d1b0df9a0f34d6298c7f771426310bc379af405be478cdfd5321a1bb0f"} Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.859395 4799 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-2wknp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.859482 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" podUID="06fabbfb-ca52-4980-9478-5fbe09bca884" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.859729 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:35:47 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Jan 21 17:35:47 crc kubenswrapper[4799]: [+]process-running ok Jan 21 17:35:47 crc kubenswrapper[4799]: healthz check failed Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.859772 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:35:47 crc kubenswrapper[4799]: I0121 17:35:47.901374 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:47 crc kubenswrapper[4799]: E0121 17:35:47.922857 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:48.422836421 +0000 UTC m=+175.049126444 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.128835 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:48 crc kubenswrapper[4799]: E0121 17:35:48.130256 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:48.630209306 +0000 UTC m=+175.256499329 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.130421 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:48 crc kubenswrapper[4799]: E0121 17:35:48.130882 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:48.630869217 +0000 UTC m=+175.257159240 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.263409 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:48 crc kubenswrapper[4799]: E0121 17:35:48.263693 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:48.76364268 +0000 UTC m=+175.389932703 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.263772 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:48 crc kubenswrapper[4799]: E0121 17:35:48.264536 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:48.764526758 +0000 UTC m=+175.390816781 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.385949 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:48 crc kubenswrapper[4799]: E0121 17:35:48.386917 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:48.886853317 +0000 UTC m=+175.513143340 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.509077 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:48 crc kubenswrapper[4799]: E0121 17:35:48.509520 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:49.009504087 +0000 UTC m=+175.635794110 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.609927 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:48 crc kubenswrapper[4799]: E0121 17:35:48.610739 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:49.110718813 +0000 UTC m=+175.737008836 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.656095 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jnshm" podStartSLOduration=146.656073078 podStartE2EDuration="2m26.656073078s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:48.654473559 +0000 UTC m=+175.280763602" watchObservedRunningTime="2026-01-21 17:35:48.656073078 +0000 UTC m=+175.282363101" Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.656669 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7wbqr" podStartSLOduration=146.656660467 podStartE2EDuration="2m26.656660467s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:48.101827877 +0000 UTC m=+174.728117900" watchObservedRunningTime="2026-01-21 17:35:48.656660467 +0000 UTC m=+175.282950480" Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.672002 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dbbcd"] Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.696223 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.696418 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.710469 4799 patch_prober.go:28] interesting 
pod/apiserver-76f77b778f-w2n7v container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.5:8443/livez\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.710564 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" podUID="f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.5:8443/livez\": dial tcp 10.217.0.5:8443: connect: connection refused" Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.716725 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:48 crc kubenswrapper[4799]: E0121 17:35:48.724926 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:49.22487072 +0000 UTC m=+175.851160743 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.731011 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgvfc"] Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.819498 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 21 17:35:48 crc kubenswrapper[4799]: E0121 17:35:48.820016 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:49.319991257 +0000 UTC m=+175.946281280 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.838992 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vfk7d"
Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.854321 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:35:48 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld
Jan 21 17:35:48 crc kubenswrapper[4799]: [+]process-running ok
Jan 21 17:35:48 crc kubenswrapper[4799]: healthz check failed
Jan 21 17:35:48 crc kubenswrapper[4799]: I0121 17:35:48.854394 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:48.924190 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:49 crc kubenswrapper[4799]: E0121 17:35:48.925038 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:49.425021921 +0000 UTC m=+176.051311944 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:48.952586 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vccfc" event={"ID":"cfc335e9-4154-4713-a1b7-96f30bdab940","Type":"ContainerStarted","Data":"2e5e8f7cf8b6085ef90b348ffd10e54d061db67d039a27604dc1c49b5259b64b"}
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:48.970880 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgvfc" event={"ID":"9a09e09d-8207-4727-9c4e-cea051cb063a","Type":"ContainerStarted","Data":"da17ceb37134c3bed3b69a22f8fca7eb4e4866d05848c981d16827426c18bead"}
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.016902 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzgjt" event={"ID":"10d51c83-0754-4e1a-a39f-de83ea48bf7b","Type":"ContainerStarted","Data":"9650594c571ab74427ec889bc274b85b035bbe248fa6ef69e1a583ab1174cbe1"}
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.030894 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:49 crc kubenswrapper[4799]: E0121 17:35:49.031642 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:49.531620964 +0000 UTC m=+176.157910987 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.038244 4799 generic.go:334] "Generic (PLEG): container finished" podID="2a7d46ef-dfda-4602-a004-c26ff4335788" containerID="6645693638f38ec9faaca5b9de7a454b1d9736346297884af09de306a80ce68b" exitCode=0
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.038358 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hk87s" event={"ID":"2a7d46ef-dfda-4602-a004-c26ff4335788","Type":"ContainerDied","Data":"6645693638f38ec9faaca5b9de7a454b1d9736346297884af09de306a80ce68b"}
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.060526 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" podStartSLOduration=148.060505259 podStartE2EDuration="2m28.060505259s" podCreationTimestamp="2026-01-21 17:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:49.039933871 +0000 UTC m=+175.666223914" watchObservedRunningTime="2026-01-21 17:35:49.060505259 +0000 UTC m=+175.686795282"
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.074242 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-j254q" event={"ID":"fddb0541-77a5-4db7-8d2a-0b8e94488823","Type":"ContainerStarted","Data":"a67b6de899e2b36d62cb638c700e0279ce887c9f884b347456af70bb8f362222"}
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.096569 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-7q999"]
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.100516 4799 generic.go:334] "Generic (PLEG): container finished" podID="3e7169e9-ed59-4259-bc63-a1079a9412c0" containerID="320277bc27fd1de47ca7e3e01b4bd6c6972eace8b6347da3bb90efe4c4416f32" exitCode=0
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.100674 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2z8fw" event={"ID":"3e7169e9-ed59-4259-bc63-a1079a9412c0","Type":"ContainerDied","Data":"320277bc27fd1de47ca7e3e01b4bd6c6972eace8b6347da3bb90efe4c4416f32"}
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.127103 4799 generic.go:334] "Generic (PLEG): container finished" podID="2145d2a2-e101-44cb-b0c4-4161fbb910f8" containerID="c4302fd9e6c5aafa1ffde4166178ec0c6ab6c5807c15986b62678ee18aa6f492" exitCode=0
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.127259 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zk5bn" event={"ID":"2145d2a2-e101-44cb-b0c4-4161fbb910f8","Type":"ContainerDied","Data":"c4302fd9e6c5aafa1ffde4166178ec0c6ab6c5807c15986b62678ee18aa6f492"}
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.132104 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:49 crc kubenswrapper[4799]: E0121 17:35:49.132502 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:49.632488589 +0000 UTC m=+176.258778602 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.148580 4799 generic.go:334] "Generic (PLEG): container finished" podID="cb30842a-4bc0-4d3d-aa45-ff611e019759" containerID="63fef1d3497f2846f928ab9c25141b3f4880b8afb782398ea0220eb06e79e4f3" exitCode=0
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.148740 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8qkfv" event={"ID":"cb30842a-4bc0-4d3d-aa45-ff611e019759","Type":"ContainerDied","Data":"63fef1d3497f2846f928ab9c25141b3f4880b8afb782398ea0220eb06e79e4f3"}
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.177417 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dbbcd" event={"ID":"ad7b0f43-cc68-4c74-967f-bc61107e6d0f","Type":"ContainerStarted","Data":"0be8e626636345a4e29e70cc2bcd1fe9ea856fe37160d0d16d75a247031f4acb"}
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.234872 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:49 crc kubenswrapper[4799]: E0121 17:35:49.235269 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:49.735250723 +0000 UTC m=+176.361540746 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.293804 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-sm66d" podStartSLOduration=18.293782086 podStartE2EDuration="18.293782086s" podCreationTimestamp="2026-01-21 17:35:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:49.291206686 +0000 UTC m=+175.917496709" watchObservedRunningTime="2026-01-21 17:35:49.293782086 +0000 UTC m=+175.920072109"
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.343274 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:49 crc kubenswrapper[4799]: E0121 17:35:49.345108 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:49.845086295 +0000 UTC m=+176.471376318 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.458214 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:49 crc kubenswrapper[4799]: E0121 17:35:49.458745 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:49.958724816 +0000 UTC m=+176.585014839 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.482635 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-5mmz6" podStartSLOduration=147.482607546 podStartE2EDuration="2m27.482607546s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:49.457862189 +0000 UTC m=+176.084152212" watchObservedRunningTime="2026-01-21 17:35:49.482607546 +0000 UTC m=+176.108897569"
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.560943 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:49 crc kubenswrapper[4799]: E0121 17:35:49.561404 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:50.061390227 +0000 UTC m=+176.687680250 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.570426 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4"
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.585251 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8zkr4"
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.662576 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:49 crc kubenswrapper[4799]: E0121 17:35:49.664195 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:50.164170851 +0000 UTC m=+176.790460874 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.833335 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:49 crc kubenswrapper[4799]: E0121 17:35:49.833817 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:50.333802777 +0000 UTC m=+176.960092800 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.902915 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:35:49 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld
Jan 21 17:35:49 crc kubenswrapper[4799]: [+]process-running ok
Jan 21 17:35:49 crc kubenswrapper[4799]: healthz check failed
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.903003 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:35:49 crc kubenswrapper[4799]: I0121 17:35:49.935161 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:49 crc kubenswrapper[4799]: E0121 17:35:49.935654 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:50.435630922 +0000 UTC m=+177.061920945 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.039146 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:50 crc kubenswrapper[4799]: E0121 17:35:50.039665 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:50.539621283 +0000 UTC m=+177.165911306 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.144053 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:50 crc kubenswrapper[4799]: E0121 17:35:50.144601 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:50.644570044 +0000 UTC m=+177.270860067 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.246705 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:50 crc kubenswrapper[4799]: E0121 17:35:50.248952 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:50.748583957 +0000 UTC m=+177.374873970 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.349560 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:50 crc kubenswrapper[4799]: E0121 17:35:50.349864 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:50.849845514 +0000 UTC m=+177.476135537 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.368320 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-7q999" event={"ID":"7796adba-b973-44ee-b0c4-c0df544250e3","Type":"ContainerStarted","Data":"1b4d7e44cb5ef970a34afced5cf3074b0e310761bc9a8f4146b0badbaf028c07"}
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.410638 4799 generic.go:334] "Generic (PLEG): container finished" podID="10d51c83-0754-4e1a-a39f-de83ea48bf7b" containerID="9650594c571ab74427ec889bc274b85b035bbe248fa6ef69e1a583ab1174cbe1" exitCode=0
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.410759 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzgjt" event={"ID":"10d51c83-0754-4e1a-a39f-de83ea48bf7b","Type":"ContainerDied","Data":"9650594c571ab74427ec889bc274b85b035bbe248fa6ef69e1a583ab1174cbe1"}
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.415765 4799 generic.go:334] "Generic (PLEG): container finished" podID="9e82402a-bf1e-418b-9ec3-7723300db21b" containerID="0cf76a27aa3c02482e1617a07e38ce83eb19ab44881e39186665de44ba8a0023" exitCode=0
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.415829 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw" event={"ID":"9e82402a-bf1e-418b-9ec3-7723300db21b","Type":"ContainerDied","Data":"0cf76a27aa3c02482e1617a07e38ce83eb19ab44881e39186665de44ba8a0023"}
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.455188 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:50 crc kubenswrapper[4799]: E0121 17:35:50.455621 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:50.955606701 +0000 UTC m=+177.581896724 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.466739 4799 generic.go:334] "Generic (PLEG): container finished" podID="cfc335e9-4154-4713-a1b7-96f30bdab940" containerID="2e5e8f7cf8b6085ef90b348ffd10e54d061db67d039a27604dc1c49b5259b64b" exitCode=0
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.466810 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vccfc" event={"ID":"cfc335e9-4154-4713-a1b7-96f30bdab940","Type":"ContainerDied","Data":"2e5e8f7cf8b6085ef90b348ffd10e54d061db67d039a27604dc1c49b5259b64b"}
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.471317 4799 generic.go:334] "Generic (PLEG): container finished" podID="9a09e09d-8207-4727-9c4e-cea051cb063a" containerID="f9c6e2fc324951cbde93977c300e93553ede7063fd5ab935af4587be88b7a7d0" exitCode=0
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.471370 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgvfc" event={"ID":"9a09e09d-8207-4727-9c4e-cea051cb063a","Type":"ContainerDied","Data":"f9c6e2fc324951cbde93977c300e93553ede7063fd5ab935af4587be88b7a7d0"}
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.477407 4799 generic.go:334] "Generic (PLEG): container finished" podID="ad7b0f43-cc68-4c74-967f-bc61107e6d0f" containerID="63d3065eb6810eddd864256c0f40e8859b5a91834acd8e5e0feafba402ff2087" exitCode=0
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.478414 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dbbcd" event={"ID":"ad7b0f43-cc68-4c74-967f-bc61107e6d0f","Type":"ContainerDied","Data":"63d3065eb6810eddd864256c0f40e8859b5a91834acd8e5e0feafba402ff2087"}
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.633804 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:50 crc kubenswrapper[4799]: E0121 17:35:50.634370 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:51.134349907 +0000 UTC m=+177.760639930 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.757736 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:50 crc kubenswrapper[4799]: E0121 17:35:50.761713 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:51.261695764 +0000 UTC m=+177.887985987 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.862538 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:50 crc kubenswrapper[4799]: E0121 17:35:50.863085 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:51.363060856 +0000 UTC m=+177.989350879 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.867571 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:35:50 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld
Jan 21 17:35:50 crc kubenswrapper[4799]: [+]process-running ok
Jan 21 17:35:50 crc kubenswrapper[4799]: healthz check failed
Jan 21 17:35:50 crc kubenswrapper[4799]: I0121 17:35:50.867650 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:50.963992 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:51 crc kubenswrapper[4799]: E0121 17:35:50.964434 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:51.464420457 +0000 UTC m=+178.090710470 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.124633 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:51 crc kubenswrapper[4799]: E0121 17:35:51.125069 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:51.625036155 +0000 UTC m=+178.251326178 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.125244 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:51 crc kubenswrapper[4799]: E0121 17:35:51.125635 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:51.625625313 +0000 UTC m=+178.251915336 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.234312 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:51 crc kubenswrapper[4799]: E0121 17:35:51.235385 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:51.735356134 +0000 UTC m=+178.361646147 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.235828 4799 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.399333 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:51 crc kubenswrapper[4799]: E0121 17:35:51.399700 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:51.899684838 +0000 UTC m=+178.525974861 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.500164 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:51 crc kubenswrapper[4799]: E0121 17:35:51.500357 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:52.000321437 +0000 UTC m=+178.626611460 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.500661 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:51 crc kubenswrapper[4799]: E0121 17:35:51.501226 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:52.001202214 +0000 UTC m=+178.627492237 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.725088 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:51 crc kubenswrapper[4799]: E0121 17:35:51.725507 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-21 17:35:52.225485594 +0000 UTC m=+178.851775617 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.727337 4799 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-21T17:35:51.235917522Z","Handler":null,"Name":""}
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.836028 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:51 crc kubenswrapper[4799]: E0121 17:35:51.836535 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-21 17:35:52.336516094 +0000 UTC m=+178.962806117 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wqt42" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.852232 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:35:51 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld
Jan 21 17:35:51 crc kubenswrapper[4799]: [+]process-running ok
Jan 21 17:35:51 crc kubenswrapper[4799]: healthz check failed
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.852309 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.889955 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.890885 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.891868 4799 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.891900 4799 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.900904 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.904077 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.906054 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.920922 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-j254q" event={"ID":"fddb0541-77a5-4db7-8d2a-0b8e94488823","Type":"ContainerStarted","Data":"3cdcfcbc2c1f77b9a1b82e34d2a9cd4be585cd1fb786ab546b4ad2615747455e"}
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.937477 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.937699 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2b880559-37c6-4d0e-983a-004d4d0991b6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2b880559-37c6-4d0e-983a-004d4d0991b6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.937777 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2b880559-37c6-4d0e-983a-004d4d0991b6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2b880559-37c6-4d0e-983a-004d4d0991b6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 21 17:35:51 crc kubenswrapper[4799]: I0121 17:35:51.952193 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-7q999" event={"ID":"7796adba-b973-44ee-b0c4-c0df544250e3","Type":"ContainerStarted","Data":"9fb94e05eeb1ab8bd715b41db5b07927e8f1cd53f6ccfa939d5d76a12ab9db04"}
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.001251 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.039404 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2b880559-37c6-4d0e-983a-004d4d0991b6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2b880559-37c6-4d0e-983a-004d4d0991b6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.039498 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.039524 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2b880559-37c6-4d0e-983a-004d4d0991b6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2b880559-37c6-4d0e-983a-004d4d0991b6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.039882 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2b880559-37c6-4d0e-983a-004d4d0991b6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2b880559-37c6-4d0e-983a-004d4d0991b6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.061466 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.061536 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.130216 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2b880559-37c6-4d0e-983a-004d4d0991b6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2b880559-37c6-4d0e-983a-004d4d0991b6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.261597 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.262552 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.686240 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wqt42\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.891187 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:35:52 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld
Jan 21 17:35:52 crc kubenswrapper[4799]: [+]process-running ok
Jan 21 17:35:52 crc kubenswrapper[4799]: healthz check failed
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.891583 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.910043 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-sm66d"
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.941321 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.942625 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.946886 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.947115 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.970359 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.984544 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-wqt42"
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.986070 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 21 17:35:52 crc kubenswrapper[4799]: I0121 17:35:52.986114 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 21 17:35:53 crc kubenswrapper[4799]: I0121 17:35:53.166913 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 21 17:35:53 crc kubenswrapper[4799]: I0121 17:35:53.167291 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 21 17:35:53 crc kubenswrapper[4799]: I0121 17:35:53.167507 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 21 17:35:53 crc kubenswrapper[4799]: I0121 17:35:53.171940 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-j254q" event={"ID":"fddb0541-77a5-4db7-8d2a-0b8e94488823","Type":"ContainerStarted","Data":"59ebfe0e325d17ee4984a269ac3bd26015f4287af5e7ef5caa1b5dee8ce7faa4"}
Jan 21 17:35:53 crc kubenswrapper[4799]: I0121 17:35:53.203373 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-7q999" event={"ID":"7796adba-b973-44ee-b0c4-c0df544250e3","Type":"ContainerStarted","Data":"93a72bea32ebfa98871f0aa6eb0e2dbd2e520e74224d9066663064756dbba09c"}
Jan 21 17:35:53 crc kubenswrapper[4799]: I0121 17:35:53.211062 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 21 17:35:53 crc kubenswrapper[4799]: I0121 17:35:53.230591 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-j254q" podStartSLOduration=22.230557554 podStartE2EDuration="22.230557554s" podCreationTimestamp="2026-01-21 17:35:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:53.223557097 +0000 UTC m=+179.849847140" watchObservedRunningTime="2026-01-21 17:35:53.230557554 +0000 UTC m=+179.856847587"
Jan 21 17:35:53 crc kubenswrapper[4799]: I0121 17:35:53.393769 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-7q999" podStartSLOduration=151.393737449 podStartE2EDuration="2m31.393737449s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:53.258856611 +0000 UTC m=+179.885146634" watchObservedRunningTime="2026-01-21 17:35:53.393737449 +0000 UTC m=+180.020027462"
Jan 21 17:35:53 crc kubenswrapper[4799]: I0121 17:35:53.403721 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 21 17:35:53 crc kubenswrapper[4799]: I0121 17:35:53.452089 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 21 17:35:53 crc kubenswrapper[4799]: W0121 17:35:53.642336 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod2b880559_37c6_4d0e_983a_004d4d0991b6.slice/crio-c75ca5baad4a806746b1de2f1a88b41fe532bb3fd2c5b3aff13603f0b9a70d43 WatchSource:0}: Error finding container c75ca5baad4a806746b1de2f1a88b41fe532bb3fd2c5b3aff13603f0b9a70d43: Status 404 returned error can't find the container with id c75ca5baad4a806746b1de2f1a88b41fe532bb3fd2c5b3aff13603f0b9a70d43
Jan 21 17:35:53 crc kubenswrapper[4799]: I0121 17:35:53.846283 4799 patch_prober.go:28] interesting pod/apiserver-76f77b778f-w2n7v container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Jan 21 17:35:53 crc kubenswrapper[4799]: [+]log ok
Jan 21 17:35:53 crc kubenswrapper[4799]: [+]etcd ok
Jan 21 17:35:53 crc kubenswrapper[4799]: [+]poststarthook/start-apiserver-admission-initializer ok
Jan 21 17:35:53 crc kubenswrapper[4799]: [+]poststarthook/generic-apiserver-start-informers ok
Jan 21 17:35:53 crc kubenswrapper[4799]: [+]poststarthook/max-in-flight-filter ok
Jan 21 17:35:53 crc kubenswrapper[4799]: [+]poststarthook/storage-object-count-tracker-hook ok
Jan 21 17:35:53 crc kubenswrapper[4799]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Jan 21 17:35:53 crc kubenswrapper[4799]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Jan 21 17:35:53 crc kubenswrapper[4799]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Jan 21 17:35:53 crc kubenswrapper[4799]: [+]poststarthook/project.openshift.io-projectcache ok
Jan 21 17:35:53 crc kubenswrapper[4799]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Jan 21 17:35:53 crc kubenswrapper[4799]: [+]poststarthook/openshift.io-startinformers ok
Jan 21 17:35:53 crc kubenswrapper[4799]: [+]poststarthook/openshift.io-restmapperupdater ok
Jan 21 17:35:53 crc kubenswrapper[4799]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Jan 21 17:35:53 crc kubenswrapper[4799]: livez check failed
Jan 21 17:35:53 crc kubenswrapper[4799]: I0121 17:35:53.846412 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" podUID="f92f4d8f-40e8-4369-aae0-5e4b5d42c4bd" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:35:53 crc kubenswrapper[4799]: I0121 17:35:53.873699 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 21 17:35:53 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld
Jan 21 17:35:53 crc kubenswrapper[4799]: [+]process-running ok
Jan 21 17:35:53 crc kubenswrapper[4799]: healthz check failed
Jan 21 17:35:53 crc kubenswrapper[4799]: I0121 17:35:53.873811 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 21 17:35:53 crc kubenswrapper[4799]: I0121 17:35:53.979711 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw"
Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.141895 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e82402a-bf1e-418b-9ec3-7723300db21b-config-volume\") pod \"9e82402a-bf1e-418b-9ec3-7723300db21b\" (UID: \"9e82402a-bf1e-418b-9ec3-7723300db21b\") "
Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.142436 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e82402a-bf1e-418b-9ec3-7723300db21b-secret-volume\") pod \"9e82402a-bf1e-418b-9ec3-7723300db21b\" (UID: \"9e82402a-bf1e-418b-9ec3-7723300db21b\") "
Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.142492 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjz4n\" (UniqueName: \"kubernetes.io/projected/9e82402a-bf1e-418b-9ec3-7723300db21b-kube-api-access-bjz4n\") pod \"9e82402a-bf1e-418b-9ec3-7723300db21b\" (UID: \"9e82402a-bf1e-418b-9ec3-7723300db21b\") "
Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.142933 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e82402a-bf1e-418b-9ec3-7723300db21b-config-volume" (OuterVolumeSpecName: "config-volume") pod "9e82402a-bf1e-418b-9ec3-7723300db21b" (UID: "9e82402a-bf1e-418b-9ec3-7723300db21b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.317245 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e82402a-bf1e-418b-9ec3-7723300db21b-config-volume\") on node \"crc\" DevicePath \"\""
Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.374965 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2b880559-37c6-4d0e-983a-004d4d0991b6","Type":"ContainerStarted","Data":"c75ca5baad4a806746b1de2f1a88b41fe532bb3fd2c5b3aff13603f0b9a70d43"}
Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.385744 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw" event={"ID":"9e82402a-bf1e-418b-9ec3-7723300db21b","Type":"ContainerDied","Data":"c6c8d1f47edb7357f362b77647a7ddcd02472436da632cc70d23896727c2ccd8"}
Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.385895 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c6c8d1f47edb7357f362b77647a7ddcd02472436da632cc70d23896727c2ccd8"
Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.386236 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw"
Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.584537 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e82402a-bf1e-418b-9ec3-7723300db21b-kube-api-access-bjz4n" (OuterVolumeSpecName: "kube-api-access-bjz4n") pod "9e82402a-bf1e-418b-9ec3-7723300db21b" (UID: "9e82402a-bf1e-418b-9ec3-7723300db21b"). InnerVolumeSpecName "kube-api-access-bjz4n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.593815 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjz4n\" (UniqueName: \"kubernetes.io/projected/9e82402a-bf1e-418b-9ec3-7723300db21b-kube-api-access-bjz4n\") on node \"crc\" DevicePath \"\""
Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.604026 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e82402a-bf1e-418b-9ec3-7723300db21b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9e82402a-bf1e-418b-9ec3-7723300db21b" (UID: "9e82402a-bf1e-418b-9ec3-7723300db21b"). InnerVolumeSpecName "secret-volume".
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.615104 4799 patch_prober.go:28] interesting pod/console-f9d7485db-m875t container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.615222 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-m875t" podUID="46c59bb9-7544-496f-a38c-1054b3b95ae8" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.653847 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wqt42"] Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.697446 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e82402a-bf1e-418b-9ec3-7723300db21b-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.836889 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:35:54 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Jan 21 17:35:54 crc kubenswrapper[4799]: [+]process-running ok Jan 21 17:35:54 crc kubenswrapper[4799]: healthz check failed Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.836959 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.899912 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.900006 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.902250 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.902314 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 21 17:35:54 crc kubenswrapper[4799]: I0121 17:35:54.907648 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" Jan 21 17:35:55 crc kubenswrapper[4799]: I0121 17:35:55.457933 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" event={"ID":"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3","Type":"ContainerStarted","Data":"459bb4056a2d8d8049ece69f7e13d569da9a9dc399189618f5ad4d910cde5109"} Jan 21 17:35:55 crc kubenswrapper[4799]: I0121 17:35:55.462929 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2b880559-37c6-4d0e-983a-004d4d0991b6","Type":"ContainerStarted","Data":"366e2c1ed29c60c58404e0464cb13cbeee0d60a0eff8719f24e3bd5c29f3e1f7"} Jan 21 17:35:55 crc kubenswrapper[4799]: I0121 17:35:55.672517 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 21 17:35:55 crc kubenswrapper[4799]: I0121 17:35:55.835985 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:35:55 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Jan 21 17:35:55 crc kubenswrapper[4799]: [+]process-running ok Jan 21 17:35:55 crc kubenswrapper[4799]: healthz check failed Jan 21 17:35:55 crc kubenswrapper[4799]: I0121 17:35:55.836055 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:35:55 crc kubenswrapper[4799]: W0121 17:35:55.898057 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pode46e21d1_a2ce_42fb_a5cf_ab5df29c1aa0.slice/crio-c3abb1014e7fc24ab764e346f7e59f502d9b79f847c042e2a3dffd5841b25614 WatchSource:0}: Error finding container c3abb1014e7fc24ab764e346f7e59f502d9b79f847c042e2a3dffd5841b25614: Status 404 returned error can't find the container with id c3abb1014e7fc24ab764e346f7e59f502d9b79f847c042e2a3dffd5841b25614 Jan 21 17:35:56 crc kubenswrapper[4799]: I0121 17:35:56.103447 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:35:56 crc kubenswrapper[4799]: I0121 17:35:56.103551 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:35:56 crc kubenswrapper[4799]: I0121 17:35:56.524590 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0","Type":"ContainerStarted","Data":"c3abb1014e7fc24ab764e346f7e59f502d9b79f847c042e2a3dffd5841b25614"} Jan 21 17:35:56 crc kubenswrapper[4799]: I0121 17:35:56.528317 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" 
event={"ID":"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3","Type":"ContainerStarted","Data":"179884f2edbcc76d15f1cbd068c7e2ba36779347094045510cf75e7bb6226e86"} Jan 21 17:35:56 crc kubenswrapper[4799]: I0121 17:35:56.528565 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:35:56 crc kubenswrapper[4799]: I0121 17:35:56.629661 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" podStartSLOduration=154.629639653 podStartE2EDuration="2m34.629639653s" podCreationTimestamp="2026-01-21 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:56.626033831 +0000 UTC m=+183.252323864" watchObservedRunningTime="2026-01-21 17:35:56.629639653 +0000 UTC m=+183.255929676" Jan 21 17:35:56 crc kubenswrapper[4799]: I0121 17:35:56.649320 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=5.649284871 podStartE2EDuration="5.649284871s" podCreationTimestamp="2026-01-21 17:35:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:56.64731565 +0000 UTC m=+183.273605683" watchObservedRunningTime="2026-01-21 17:35:56.649284871 +0000 UTC m=+183.275574894" Jan 21 17:35:56 crc kubenswrapper[4799]: I0121 17:35:56.876673 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:35:56 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Jan 21 17:35:56 crc kubenswrapper[4799]: [+]process-running ok Jan 21 17:35:56 crc kubenswrapper[4799]: healthz check failed Jan 21 17:35:56 crc kubenswrapper[4799]: I0121 17:35:56.876761 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:35:57 crc kubenswrapper[4799]: I0121 17:35:57.833298 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:35:57 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Jan 21 17:35:57 crc kubenswrapper[4799]: [+]process-running ok Jan 21 17:35:57 crc kubenswrapper[4799]: healthz check failed Jan 21 17:35:57 crc kubenswrapper[4799]: I0121 17:35:57.834025 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:35:58 crc kubenswrapper[4799]: I0121 17:35:58.765353 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0","Type":"ContainerStarted","Data":"30629310d99f7ccadefb0bb1fe78ddf3eb463ccb0dcc7ecea1d59b2cbccd87da"} Jan 21 17:35:58 crc kubenswrapper[4799]: I0121 
17:35:58.798782 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:58 crc kubenswrapper[4799]: I0121 17:35:58.804303 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-w2n7v" Jan 21 17:35:58 crc kubenswrapper[4799]: I0121 17:35:58.821032 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=6.820949763 podStartE2EDuration="6.820949763s" podCreationTimestamp="2026-01-21 17:35:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:35:58.816915168 +0000 UTC m=+185.443205191" watchObservedRunningTime="2026-01-21 17:35:58.820949763 +0000 UTC m=+185.447239786" Jan 21 17:35:58 crc kubenswrapper[4799]: I0121 17:35:58.835942 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:35:58 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Jan 21 17:35:58 crc kubenswrapper[4799]: [+]process-running ok Jan 21 17:35:58 crc kubenswrapper[4799]: healthz check failed Jan 21 17:35:58 crc kubenswrapper[4799]: I0121 17:35:58.836118 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:35:59 crc kubenswrapper[4799]: I0121 17:35:59.898317 4799 patch_prober.go:28] interesting pod/router-default-5444994796-q6bfd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 21 17:35:59 crc kubenswrapper[4799]: [-]has-synced failed: reason withheld Jan 21 17:35:59 crc kubenswrapper[4799]: [+]process-running ok Jan 21 17:35:59 crc kubenswrapper[4799]: healthz check failed Jan 21 17:35:59 crc kubenswrapper[4799]: I0121 17:35:59.898430 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-q6bfd" podUID="c90c1c25-29e5-416a-af94-db168bd239b0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 21 17:36:00 crc kubenswrapper[4799]: I0121 17:36:00.850455 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:36:00 crc kubenswrapper[4799]: I0121 17:36:00.863320 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-q6bfd" Jan 21 17:36:00 crc kubenswrapper[4799]: I0121 17:36:00.965878 4799 generic.go:334] "Generic (PLEG): container finished" podID="e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0" containerID="30629310d99f7ccadefb0bb1fe78ddf3eb463ccb0dcc7ecea1d59b2cbccd87da" exitCode=0 Jan 21 17:36:00 crc kubenswrapper[4799]: I0121 17:36:00.966023 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0","Type":"ContainerDied","Data":"30629310d99f7ccadefb0bb1fe78ddf3eb463ccb0dcc7ecea1d59b2cbccd87da"} Jan 21 17:36:00 crc 
kubenswrapper[4799]: I0121 17:36:00.990328 4799 generic.go:334] "Generic (PLEG): container finished" podID="2b880559-37c6-4d0e-983a-004d4d0991b6" containerID="366e2c1ed29c60c58404e0464cb13cbeee0d60a0eff8719f24e3bd5c29f3e1f7" exitCode=0 Jan 21 17:36:00 crc kubenswrapper[4799]: I0121 17:36:00.990436 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2b880559-37c6-4d0e-983a-004d4d0991b6","Type":"ContainerDied","Data":"366e2c1ed29c60c58404e0464cb13cbeee0d60a0eff8719f24e3bd5c29f3e1f7"} Jan 21 17:36:02 crc kubenswrapper[4799]: I0121 17:36:02.844079 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 21 17:36:02 crc kubenswrapper[4799]: I0121 17:36:02.950373 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2b880559-37c6-4d0e-983a-004d4d0991b6-kubelet-dir\") pod \"2b880559-37c6-4d0e-983a-004d4d0991b6\" (UID: \"2b880559-37c6-4d0e-983a-004d4d0991b6\") " Jan 21 17:36:02 crc kubenswrapper[4799]: I0121 17:36:02.950462 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2b880559-37c6-4d0e-983a-004d4d0991b6-kube-api-access\") pod \"2b880559-37c6-4d0e-983a-004d4d0991b6\" (UID: \"2b880559-37c6-4d0e-983a-004d4d0991b6\") " Jan 21 17:36:02 crc kubenswrapper[4799]: I0121 17:36:02.951301 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2b880559-37c6-4d0e-983a-004d4d0991b6-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2b880559-37c6-4d0e-983a-004d4d0991b6" (UID: "2b880559-37c6-4d0e-983a-004d4d0991b6"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:36:02 crc kubenswrapper[4799]: I0121 17:36:02.974179 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b880559-37c6-4d0e-983a-004d4d0991b6-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2b880559-37c6-4d0e-983a-004d4d0991b6" (UID: "2b880559-37c6-4d0e-983a-004d4d0991b6"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:36:03 crc kubenswrapper[4799]: I0121 17:36:03.024884 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2b880559-37c6-4d0e-983a-004d4d0991b6","Type":"ContainerDied","Data":"c75ca5baad4a806746b1de2f1a88b41fe532bb3fd2c5b3aff13603f0b9a70d43"} Jan 21 17:36:03 crc kubenswrapper[4799]: I0121 17:36:03.024971 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 21 17:36:03 crc kubenswrapper[4799]: I0121 17:36:03.024989 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c75ca5baad4a806746b1de2f1a88b41fe532bb3fd2c5b3aff13603f0b9a70d43" Jan 21 17:36:03 crc kubenswrapper[4799]: I0121 17:36:03.053745 4799 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2b880559-37c6-4d0e-983a-004d4d0991b6-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 21 17:36:03 crc kubenswrapper[4799]: I0121 17:36:03.053800 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2b880559-37c6-4d0e-983a-004d4d0991b6-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 21 17:36:03 crc kubenswrapper[4799]: I0121 17:36:03.108926 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 21 17:36:03 crc kubenswrapper[4799]: I0121 17:36:03.154766 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0-kubelet-dir\") pod \"e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0\" (UID: \"e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0\") " Jan 21 17:36:03 crc kubenswrapper[4799]: I0121 17:36:03.154920 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0-kube-api-access\") pod \"e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0\" (UID: \"e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0\") " Jan 21 17:36:03 crc kubenswrapper[4799]: I0121 17:36:03.154978 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0" (UID: "e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:36:03 crc kubenswrapper[4799]: I0121 17:36:03.156029 4799 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 21 17:36:03 crc kubenswrapper[4799]: I0121 17:36:03.161337 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0" (UID: "e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:36:03 crc kubenswrapper[4799]: I0121 17:36:03.257957 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 21 17:36:04 crc kubenswrapper[4799]: I0121 17:36:04.081616 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0","Type":"ContainerDied","Data":"c3abb1014e7fc24ab764e346f7e59f502d9b79f847c042e2a3dffd5841b25614"} Jan 21 17:36:04 crc kubenswrapper[4799]: I0121 17:36:04.081696 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3abb1014e7fc24ab764e346f7e59f502d9b79f847c042e2a3dffd5841b25614" Jan 21 17:36:04 crc kubenswrapper[4799]: I0121 17:36:04.081985 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 21 17:36:04 crc kubenswrapper[4799]: I0121 17:36:04.545747 4799 patch_prober.go:28] interesting pod/console-f9d7485db-m875t container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Jan 21 17:36:04 crc kubenswrapper[4799]: I0121 17:36:04.546268 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-m875t" podUID="46c59bb9-7544-496f-a38c-1054b3b95ae8" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Jan 21 17:36:04 crc kubenswrapper[4799]: I0121 17:36:04.870104 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 21 17:36:04 crc kubenswrapper[4799]: I0121 17:36:04.870210 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 21 17:36:04 crc kubenswrapper[4799]: I0121 17:36:04.870322 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-6cjlt" Jan 21 17:36:04 crc kubenswrapper[4799]: I0121 17:36:04.870496 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 21 17:36:04 crc kubenswrapper[4799]: I0121 17:36:04.870604 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 21 17:36:04 crc kubenswrapper[4799]: I0121 17:36:04.871289 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Readiness probe status=failure 
output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 21 17:36:04 crc kubenswrapper[4799]: I0121 17:36:04.871371 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 21 17:36:04 crc kubenswrapper[4799]: I0121 17:36:04.871283 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"03d081da24b542b6efc72821727800ad7670f5ba496afc7d572f5d807e8a09ba"} pod="openshift-console/downloads-7954f5f757-6cjlt" containerMessage="Container download-server failed liveness probe, will be restarted" Jan 21 17:36:04 crc kubenswrapper[4799]: I0121 17:36:04.871552 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" containerID="cri-o://03d081da24b542b6efc72821727800ad7670f5ba496afc7d572f5d807e8a09ba" gracePeriod=2 Jan 21 17:36:06 crc kubenswrapper[4799]: I0121 17:36:06.130812 4799 generic.go:334] "Generic (PLEG): container finished" podID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerID="03d081da24b542b6efc72821727800ad7670f5ba496afc7d572f5d807e8a09ba" exitCode=0 Jan 21 17:36:06 crc kubenswrapper[4799]: I0121 17:36:06.131025 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-6cjlt" event={"ID":"ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a","Type":"ContainerDied","Data":"03d081da24b542b6efc72821727800ad7670f5ba496afc7d572f5d807e8a09ba"} Jan 21 17:36:13 crc kubenswrapper[4799]: I0121 17:36:13.083924 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:36:13 crc kubenswrapper[4799]: I0121 17:36:13.332286 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ftnlt" Jan 21 17:36:13 crc kubenswrapper[4799]: I0121 17:36:13.858691 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 21 17:36:14 crc kubenswrapper[4799]: I0121 17:36:14.592091 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:36:14 crc kubenswrapper[4799]: I0121 17:36:14.597246 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:36:14 crc kubenswrapper[4799]: I0121 17:36:14.870203 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 21 17:36:14 crc kubenswrapper[4799]: I0121 17:36:14.870301 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 21 17:36:24 crc 
kubenswrapper[4799]: I0121 17:36:24.872503 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 21 17:36:24 crc kubenswrapper[4799]: I0121 17:36:24.873229 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 21 17:36:25 crc kubenswrapper[4799]: I0121 17:36:25.974723 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:36:25 crc kubenswrapper[4799]: I0121 17:36:25.974892 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:36:29 crc kubenswrapper[4799]: I0121 17:36:29.784442 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 21 17:36:29 crc kubenswrapper[4799]: E0121 17:36:29.785261 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0" containerName="pruner" Jan 21 17:36:29 crc kubenswrapper[4799]: I0121 17:36:29.785296 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0" containerName="pruner" Jan 21 17:36:29 crc kubenswrapper[4799]: E0121 17:36:29.785310 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e82402a-bf1e-418b-9ec3-7723300db21b" containerName="collect-profiles" Jan 21 17:36:29 crc kubenswrapper[4799]: I0121 17:36:29.785317 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e82402a-bf1e-418b-9ec3-7723300db21b" containerName="collect-profiles" Jan 21 17:36:29 crc kubenswrapper[4799]: E0121 17:36:29.785326 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b880559-37c6-4d0e-983a-004d4d0991b6" containerName="pruner" Jan 21 17:36:29 crc kubenswrapper[4799]: I0121 17:36:29.785332 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b880559-37c6-4d0e-983a-004d4d0991b6" containerName="pruner" Jan 21 17:36:29 crc kubenswrapper[4799]: I0121 17:36:29.786188 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b880559-37c6-4d0e-983a-004d4d0991b6" containerName="pruner" Jan 21 17:36:29 crc kubenswrapper[4799]: I0121 17:36:29.786221 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e46e21d1-a2ce-42fb-a5cf-ab5df29c1aa0" containerName="pruner" Jan 21 17:36:29 crc kubenswrapper[4799]: I0121 17:36:29.786235 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e82402a-bf1e-418b-9ec3-7723300db21b" containerName="collect-profiles" Jan 21 17:36:29 crc kubenswrapper[4799]: I0121 17:36:29.786839 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 21 17:36:29 crc kubenswrapper[4799]: I0121 17:36:29.790074 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 21 17:36:29 crc kubenswrapper[4799]: I0121 17:36:29.793350 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 21 17:36:29 crc kubenswrapper[4799]: I0121 17:36:29.802254 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 21 17:36:29 crc kubenswrapper[4799]: I0121 17:36:29.977209 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52c172fe-9eb8-4ca3-b87a-f025780d600c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"52c172fe-9eb8-4ca3-b87a-f025780d600c\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 21 17:36:29 crc kubenswrapper[4799]: I0121 17:36:29.977421 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52c172fe-9eb8-4ca3-b87a-f025780d600c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"52c172fe-9eb8-4ca3-b87a-f025780d600c\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 21 17:36:30 crc kubenswrapper[4799]: I0121 17:36:30.079208 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52c172fe-9eb8-4ca3-b87a-f025780d600c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"52c172fe-9eb8-4ca3-b87a-f025780d600c\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 21 17:36:30 crc kubenswrapper[4799]: I0121 17:36:30.079612 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52c172fe-9eb8-4ca3-b87a-f025780d600c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"52c172fe-9eb8-4ca3-b87a-f025780d600c\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 21 17:36:30 crc kubenswrapper[4799]: I0121 17:36:30.079389 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52c172fe-9eb8-4ca3-b87a-f025780d600c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"52c172fe-9eb8-4ca3-b87a-f025780d600c\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 21 17:36:30 crc kubenswrapper[4799]: I0121 17:36:30.101851 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52c172fe-9eb8-4ca3-b87a-f025780d600c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"52c172fe-9eb8-4ca3-b87a-f025780d600c\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 21 17:36:30 crc kubenswrapper[4799]: I0121 17:36:30.114365 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 21 17:36:34 crc kubenswrapper[4799]: E0121 17:36:34.466744 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 21 17:36:34 crc kubenswrapper[4799]: E0121 17:36:34.467999 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tw4nh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-hk87s_openshift-marketplace(2a7d46ef-dfda-4602-a004-c26ff4335788): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 17:36:34 crc kubenswrapper[4799]: E0121 17:36:34.469536 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-hk87s" podUID="2a7d46ef-dfda-4602-a004-c26ff4335788" Jan 21 17:36:34 crc kubenswrapper[4799]: I0121 17:36:34.871698 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 21 17:36:34 crc kubenswrapper[4799]: I0121 17:36:34.871761 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 21 17:36:35 crc kubenswrapper[4799]: I0121 17:36:35.378382 4799 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-kube-apiserver/installer-9-crc"] Jan 21 17:36:35 crc kubenswrapper[4799]: I0121 17:36:35.379582 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:36:35 crc kubenswrapper[4799]: I0121 17:36:35.392331 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 21 17:36:35 crc kubenswrapper[4799]: I0121 17:36:35.479282 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-kubelet-dir\") pod \"installer-9-crc\" (UID: \"3c270f61-528f-4ab0-a8a9-46efc3c85b3a\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:36:35 crc kubenswrapper[4799]: I0121 17:36:35.479385 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-kube-api-access\") pod \"installer-9-crc\" (UID: \"3c270f61-528f-4ab0-a8a9-46efc3c85b3a\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:36:35 crc kubenswrapper[4799]: I0121 17:36:35.479447 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-var-lock\") pod \"installer-9-crc\" (UID: \"3c270f61-528f-4ab0-a8a9-46efc3c85b3a\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:36:35 crc kubenswrapper[4799]: I0121 17:36:35.581449 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-kube-api-access\") pod \"installer-9-crc\" (UID: \"3c270f61-528f-4ab0-a8a9-46efc3c85b3a\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:36:35 crc kubenswrapper[4799]: I0121 17:36:35.581602 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-var-lock\") pod \"installer-9-crc\" (UID: \"3c270f61-528f-4ab0-a8a9-46efc3c85b3a\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:36:35 crc kubenswrapper[4799]: I0121 17:36:35.581686 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-kubelet-dir\") pod \"installer-9-crc\" (UID: \"3c270f61-528f-4ab0-a8a9-46efc3c85b3a\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:36:35 crc kubenswrapper[4799]: I0121 17:36:35.581857 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-kubelet-dir\") pod \"installer-9-crc\" (UID: \"3c270f61-528f-4ab0-a8a9-46efc3c85b3a\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:36:35 crc kubenswrapper[4799]: I0121 17:36:35.581925 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-var-lock\") pod \"installer-9-crc\" (UID: \"3c270f61-528f-4ab0-a8a9-46efc3c85b3a\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:36:35 crc kubenswrapper[4799]: I0121 17:36:35.605895 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-kube-api-access\") pod \"installer-9-crc\" (UID: \"3c270f61-528f-4ab0-a8a9-46efc3c85b3a\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:36:35 crc kubenswrapper[4799]: I0121 17:36:35.727598 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:36:42 crc kubenswrapper[4799]: E0121 17:36:42.415653 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-hk87s" podUID="2a7d46ef-dfda-4602-a004-c26ff4335788" Jan 21 17:36:42 crc kubenswrapper[4799]: E0121 17:36:42.505422 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 21 17:36:42 crc kubenswrapper[4799]: E0121 17:36:42.506077 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4zls6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-rzgjt_openshift-marketplace(10d51c83-0754-4e1a-a39f-de83ea48bf7b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 17:36:42 crc kubenswrapper[4799]: E0121 17:36:42.508355 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-rzgjt" podUID="10d51c83-0754-4e1a-a39f-de83ea48bf7b" Jan 21 17:36:42 crc kubenswrapper[4799]: E0121 17:36:42.521024 4799 log.go:32] "PullImage from image service failed" 
err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 21 17:36:42 crc kubenswrapper[4799]: E0121 17:36:42.521218 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v9c6v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-2z8fw_openshift-marketplace(3e7169e9-ed59-4259-bc63-a1079a9412c0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 17:36:42 crc kubenswrapper[4799]: E0121 17:36:42.522355 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-2z8fw" podUID="3e7169e9-ed59-4259-bc63-a1079a9412c0" Jan 21 17:36:42 crc kubenswrapper[4799]: E0121 17:36:42.530116 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 21 17:36:42 crc kubenswrapper[4799]: E0121 17:36:42.530357 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rhd5x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-vccfc_openshift-marketplace(cfc335e9-4154-4713-a1b7-96f30bdab940): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 17:36:42 crc kubenswrapper[4799]: E0121 17:36:42.531940 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-vccfc" podUID="cfc335e9-4154-4713-a1b7-96f30bdab940" Jan 21 17:36:43 crc kubenswrapper[4799]: E0121 17:36:43.888458 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-vccfc" podUID="cfc335e9-4154-4713-a1b7-96f30bdab940" Jan 21 17:36:43 crc kubenswrapper[4799]: E0121 17:36:43.889026 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-rzgjt" podUID="10d51c83-0754-4e1a-a39f-de83ea48bf7b" Jan 21 17:36:43 crc kubenswrapper[4799]: E0121 17:36:43.889253 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-2z8fw" podUID="3e7169e9-ed59-4259-bc63-a1079a9412c0" Jan 21 17:36:43 crc kubenswrapper[4799]: E0121 17:36:43.963102 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 21 17:36:43 crc kubenswrapper[4799]: E0121 17:36:43.963616 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init 
container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7jgws,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-8qkfv_openshift-marketplace(cb30842a-4bc0-4d3d-aa45-ff611e019759): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 17:36:43 crc kubenswrapper[4799]: E0121 17:36:43.964846 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-8qkfv" podUID="cb30842a-4bc0-4d3d-aa45-ff611e019759" Jan 21 17:36:44 crc kubenswrapper[4799]: I0121 17:36:44.869938 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 21 17:36:44 crc kubenswrapper[4799]: I0121 17:36:44.870013 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 21 17:36:46 crc kubenswrapper[4799]: E0121 17:36:46.528618 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-8qkfv" podUID="cb30842a-4bc0-4d3d-aa45-ff611e019759" Jan 21 17:36:46 crc kubenswrapper[4799]: E0121 17:36:46.775121 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 21 17:36:46 crc kubenswrapper[4799]: E0121 17:36:46.775739 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5ktbc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-zk5bn_openshift-marketplace(2145d2a2-e101-44cb-b0c4-4161fbb910f8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 17:36:46 crc kubenswrapper[4799]: E0121 17:36:46.776964 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-zk5bn" podUID="2145d2a2-e101-44cb-b0c4-4161fbb910f8" Jan 21 17:36:46 crc kubenswrapper[4799]: I0121 17:36:46.799229 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-6cjlt" event={"ID":"ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a","Type":"ContainerStarted","Data":"d4e794210b16399c42ed35f4662383d18c6a7fd3cd9c986bd83d5380f3add080"} Jan 21 17:36:46 crc kubenswrapper[4799]: E0121 17:36:46.803845 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-zk5bn" podUID="2145d2a2-e101-44cb-b0c4-4161fbb910f8" Jan 21 17:36:46 crc kubenswrapper[4799]: E0121 17:36:46.885035 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 21 17:36:46 crc kubenswrapper[4799]: E0121 17:36:46.885314 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7sps4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-xgvfc_openshift-marketplace(9a09e09d-8207-4727-9c4e-cea051cb063a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 17:36:46 crc kubenswrapper[4799]: E0121 17:36:46.886620 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-xgvfc" podUID="9a09e09d-8207-4727-9c4e-cea051cb063a" Jan 21 17:36:46 crc kubenswrapper[4799]: E0121 17:36:46.958067 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 21 17:36:46 crc kubenswrapper[4799]: E0121 17:36:46.958330 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
Jan 21 17:36:46 crc kubenswrapper[4799]: E0121 17:36:46.959567 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-dbbcd" podUID="ad7b0f43-cc68-4c74-967f-bc61107e6d0f"
Jan 21 17:36:47 crc kubenswrapper[4799]: I0121 17:36:47.010033 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Jan 21 17:36:47 crc kubenswrapper[4799]: I0121 17:36:47.162178 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Jan 21 17:36:47 crc kubenswrapper[4799]: I0121 17:36:47.806164 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"3c270f61-528f-4ab0-a8a9-46efc3c85b3a","Type":"ContainerStarted","Data":"ac578f8f1d1742acb482e78688867dd62e41148df40d044be5f06278e3420225"}
Jan 21 17:36:47 crc kubenswrapper[4799]: I0121 17:36:47.806861 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"3c270f61-528f-4ab0-a8a9-46efc3c85b3a","Type":"ContainerStarted","Data":"236cc45730c5084c4cfaf4b3a5009b3903448993180101146b8a729e388fcae8"}
Jan 21 17:36:47 crc kubenswrapper[4799]: I0121 17:36:47.811142 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"52c172fe-9eb8-4ca3-b87a-f025780d600c","Type":"ContainerStarted","Data":"987acfafae6806381550191048c1520f716947a86f5b258623e09b2919deb35e"}
Jan 21 17:36:47 crc kubenswrapper[4799]: I0121 17:36:47.811221 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"52c172fe-9eb8-4ca3-b87a-f025780d600c","Type":"ContainerStarted","Data":"536df08a3582463caa808040c3bd74c40ac7eb4cec82fc3e964b1b78d93bb468"}
Jan 21 17:36:47 crc kubenswrapper[4799]: I0121 17:36:47.812097 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Jan 21 17:36:47 crc kubenswrapper[4799]: I0121 17:36:47.812163 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Jan 21 17:36:47 crc kubenswrapper[4799]: E0121 17:36:47.823392 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-xgvfc" podUID="9a09e09d-8207-4727-9c4e-cea051cb063a"
Jan 21 17:36:47 crc kubenswrapper[4799]: E0121 17:36:47.823400 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-dbbcd" podUID="ad7b0f43-cc68-4c74-967f-bc61107e6d0f"
Jan 21 17:36:47 crc kubenswrapper[4799]: I0121 17:36:47.827968 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=12.827928396 podStartE2EDuration="12.827928396s" podCreationTimestamp="2026-01-21 17:36:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:36:47.825687846 +0000 UTC m=+234.451977889" watchObservedRunningTime="2026-01-21 17:36:47.827928396 +0000 UTC m=+234.454218419"
Jan 21 17:36:47 crc kubenswrapper[4799]: I0121 17:36:47.975593 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=18.975568984 podStartE2EDuration="18.975568984s" podCreationTimestamp="2026-01-21 17:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:36:47.94233996 +0000 UTC m=+234.568629983" watchObservedRunningTime="2026-01-21 17:36:47.975568984 +0000 UTC m=+234.601858997"
Jan 21 17:36:48 crc kubenswrapper[4799]: I0121 17:36:48.817322 4799 generic.go:334] "Generic (PLEG): container finished" podID="52c172fe-9eb8-4ca3-b87a-f025780d600c" containerID="987acfafae6806381550191048c1520f716947a86f5b258623e09b2919deb35e" exitCode=0
Jan 21 17:36:48 crc kubenswrapper[4799]: I0121 17:36:48.817403 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"52c172fe-9eb8-4ca3-b87a-f025780d600c","Type":"ContainerDied","Data":"987acfafae6806381550191048c1520f716947a86f5b258623e09b2919deb35e"}
Jan 21 17:36:50 crc kubenswrapper[4799]: I0121 17:36:50.150067 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 21 17:36:50 crc kubenswrapper[4799]: I0121 17:36:50.314695 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52c172fe-9eb8-4ca3-b87a-f025780d600c-kube-api-access\") pod \"52c172fe-9eb8-4ca3-b87a-f025780d600c\" (UID: \"52c172fe-9eb8-4ca3-b87a-f025780d600c\") " Jan 21 17:36:50 crc kubenswrapper[4799]: I0121 17:36:50.314743 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52c172fe-9eb8-4ca3-b87a-f025780d600c-kubelet-dir\") pod \"52c172fe-9eb8-4ca3-b87a-f025780d600c\" (UID: \"52c172fe-9eb8-4ca3-b87a-f025780d600c\") " Jan 21 17:36:50 crc kubenswrapper[4799]: I0121 17:36:50.315174 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/52c172fe-9eb8-4ca3-b87a-f025780d600c-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "52c172fe-9eb8-4ca3-b87a-f025780d600c" (UID: "52c172fe-9eb8-4ca3-b87a-f025780d600c"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:36:50 crc kubenswrapper[4799]: I0121 17:36:50.331190 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52c172fe-9eb8-4ca3-b87a-f025780d600c-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "52c172fe-9eb8-4ca3-b87a-f025780d600c" (UID: "52c172fe-9eb8-4ca3-b87a-f025780d600c"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:36:50 crc kubenswrapper[4799]: I0121 17:36:50.415937 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52c172fe-9eb8-4ca3-b87a-f025780d600c-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 21 17:36:50 crc kubenswrapper[4799]: I0121 17:36:50.415969 4799 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52c172fe-9eb8-4ca3-b87a-f025780d600c-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 21 17:36:50 crc kubenswrapper[4799]: I0121 17:36:50.830196 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"52c172fe-9eb8-4ca3-b87a-f025780d600c","Type":"ContainerDied","Data":"536df08a3582463caa808040c3bd74c40ac7eb4cec82fc3e964b1b78d93bb468"} Jan 21 17:36:50 crc kubenswrapper[4799]: I0121 17:36:50.830307 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 21 17:36:50 crc kubenswrapper[4799]: I0121 17:36:50.830340 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="536df08a3582463caa808040c3bd74c40ac7eb4cec82fc3e964b1b78d93bb468" Jan 21 17:36:54 crc kubenswrapper[4799]: I0121 17:36:54.869604 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-6cjlt" Jan 21 17:36:54 crc kubenswrapper[4799]: I0121 17:36:54.870051 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 21 17:36:54 crc kubenswrapper[4799]: I0121 17:36:54.870681 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 21 17:36:54 crc kubenswrapper[4799]: I0121 17:36:54.870267 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 21 17:36:54 crc kubenswrapper[4799]: I0121 17:36:54.870836 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 21 17:36:54 crc kubenswrapper[4799]: I0121 17:36:54.871610 4799 patch_prober.go:28] interesting pod/downloads-7954f5f757-6cjlt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Jan 21 17:36:54 crc kubenswrapper[4799]: I0121 17:36:54.871663 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-6cjlt" podUID="ee0cb7c7-ed21-41c2-80b6-1bdf15d1992a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Jan 21 17:36:55 crc kubenswrapper[4799]: I0121 17:36:55.971355 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:36:55 crc kubenswrapper[4799]: I0121 17:36:55.971461 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:36:55 crc kubenswrapper[4799]: I0121 17:36:55.971527 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 17:36:55 crc 
Jan 21 17:36:55 crc kubenswrapper[4799]: I0121 17:36:55.972386 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31" gracePeriod=600
Jan 21 17:36:56 crc kubenswrapper[4799]: I0121 17:36:56.989970 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31" exitCode=0
Jan 21 17:36:56 crc kubenswrapper[4799]: I0121 17:36:56.990074 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31"}
Jan 21 17:36:56 crc kubenswrapper[4799]: I0121 17:36:56.990522 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"b6462558f849eafd1973a1b2319347dad0de9388ecab61e98f6ea685f2b55daa"}
Jan 21 17:37:00 crc kubenswrapper[4799]: I0121 17:37:00.044808 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzgjt" event={"ID":"10d51c83-0754-4e1a-a39f-de83ea48bf7b","Type":"ContainerStarted","Data":"00959bca44834a71d21d39366577468682ec758788a23cacdfe816f20311f047"}
Jan 21 17:37:00 crc kubenswrapper[4799]: I0121 17:37:00.055729 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hk87s" event={"ID":"2a7d46ef-dfda-4602-a004-c26ff4335788","Type":"ContainerStarted","Data":"528e0d5716515625678bcc77f0317cdf43d92ebaa6a81a32cca6eb9851c3059a"}
Jan 21 17:37:00 crc kubenswrapper[4799]: I0121 17:37:00.058897 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vccfc" event={"ID":"cfc335e9-4154-4713-a1b7-96f30bdab940","Type":"ContainerStarted","Data":"4a3adb7fdc20f816c07096284bfd9a60c036a62057d9e6d388541ecf1977fc60"}
Jan 21 17:37:01 crc kubenswrapper[4799]: I0121 17:37:01.065459 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2z8fw" event={"ID":"3e7169e9-ed59-4259-bc63-a1079a9412c0","Type":"ContainerStarted","Data":"3697fb48318bf9a5c6fafe224205c7c78afb08ba0257bd77e58275cca87b17d3"}
Jan 21 17:37:01 crc kubenswrapper[4799]: I0121 17:37:01.068841 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zk5bn" event={"ID":"2145d2a2-e101-44cb-b0c4-4161fbb910f8","Type":"ContainerStarted","Data":"071180c6b81bf24824080cd675475a0173dd6e745c6492f96804c44ca4d370fe"}
Jan 21 17:37:02 crc kubenswrapper[4799]: I0121 17:37:02.124642 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8qkfv" event={"ID":"cb30842a-4bc0-4d3d-aa45-ff611e019759","Type":"ContainerStarted","Data":"2889ffafe54d84b1bd3b627bbe9e22685eb8dcfb52fe9d59055fac566921c971"}
Jan 21 17:37:03 crc kubenswrapper[4799]: I0121 17:37:03.186311 4799 generic.go:334] "Generic (PLEG): container finished" podID="3e7169e9-ed59-4259-bc63-a1079a9412c0" containerID="3697fb48318bf9a5c6fafe224205c7c78afb08ba0257bd77e58275cca87b17d3" exitCode=0
Jan 21 17:37:03 crc kubenswrapper[4799]: I0121 17:37:03.186402 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2z8fw" event={"ID":"3e7169e9-ed59-4259-bc63-a1079a9412c0","Type":"ContainerDied","Data":"3697fb48318bf9a5c6fafe224205c7c78afb08ba0257bd77e58275cca87b17d3"}
Jan 21 17:37:03 crc kubenswrapper[4799]: I0121 17:37:03.190713 4799 generic.go:334] "Generic (PLEG): container finished" podID="2145d2a2-e101-44cb-b0c4-4161fbb910f8" containerID="071180c6b81bf24824080cd675475a0173dd6e745c6492f96804c44ca4d370fe" exitCode=0
Jan 21 17:37:03 crc kubenswrapper[4799]: I0121 17:37:03.190824 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zk5bn" event={"ID":"2145d2a2-e101-44cb-b0c4-4161fbb910f8","Type":"ContainerDied","Data":"071180c6b81bf24824080cd675475a0173dd6e745c6492f96804c44ca4d370fe"}
Jan 21 17:37:03 crc kubenswrapper[4799]: I0121 17:37:03.219395 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgvfc" event={"ID":"9a09e09d-8207-4727-9c4e-cea051cb063a","Type":"ContainerStarted","Data":"f69aeff0b61e7bbe316c67315621d7aa5577f8f83067d002fd28a348acc522c7"}
Jan 21 17:37:04 crc kubenswrapper[4799]: I0121 17:37:04.228296 4799 generic.go:334] "Generic (PLEG): container finished" podID="2a7d46ef-dfda-4602-a004-c26ff4335788" containerID="528e0d5716515625678bcc77f0317cdf43d92ebaa6a81a32cca6eb9851c3059a" exitCode=0
Jan 21 17:37:04 crc kubenswrapper[4799]: I0121 17:37:04.229898 4799 generic.go:334] "Generic (PLEG): container finished" podID="9a09e09d-8207-4727-9c4e-cea051cb063a" containerID="f69aeff0b61e7bbe316c67315621d7aa5577f8f83067d002fd28a348acc522c7" exitCode=0
Jan 21 17:37:04 crc kubenswrapper[4799]: I0121 17:37:04.232987 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hk87s" event={"ID":"2a7d46ef-dfda-4602-a004-c26ff4335788","Type":"ContainerDied","Data":"528e0d5716515625678bcc77f0317cdf43d92ebaa6a81a32cca6eb9851c3059a"}
Jan 21 17:37:04 crc kubenswrapper[4799]: I0121 17:37:04.233022 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgvfc" event={"ID":"9a09e09d-8207-4727-9c4e-cea051cb063a","Type":"ContainerDied","Data":"f69aeff0b61e7bbe316c67315621d7aa5577f8f83067d002fd28a348acc522c7"}
Jan 21 17:37:04 crc kubenswrapper[4799]: I0121 17:37:04.907650 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-6cjlt"
Jan 21 17:37:05 crc kubenswrapper[4799]: I0121 17:37:05.239692 4799 generic.go:334] "Generic (PLEG): container finished" podID="cb30842a-4bc0-4d3d-aa45-ff611e019759" containerID="2889ffafe54d84b1bd3b627bbe9e22685eb8dcfb52fe9d59055fac566921c971" exitCode=0
Jan 21 17:37:05 crc kubenswrapper[4799]: I0121 17:37:05.239771 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8qkfv" event={"ID":"cb30842a-4bc0-4d3d-aa45-ff611e019759","Type":"ContainerDied","Data":"2889ffafe54d84b1bd3b627bbe9e22685eb8dcfb52fe9d59055fac566921c971"}
Jan 21 17:37:06 crc kubenswrapper[4799]: I0121 17:37:06.379171 4799 generic.go:334] "Generic (PLEG): container finished" podID="10d51c83-0754-4e1a-a39f-de83ea48bf7b" containerID="00959bca44834a71d21d39366577468682ec758788a23cacdfe816f20311f047" exitCode=0
Jan 21 17:37:06 crc kubenswrapper[4799]: I0121 17:37:06.380224 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzgjt" event={"ID":"10d51c83-0754-4e1a-a39f-de83ea48bf7b","Type":"ContainerDied","Data":"00959bca44834a71d21d39366577468682ec758788a23cacdfe816f20311f047"}
Jan 21 17:37:07 crc kubenswrapper[4799]: I0121 17:37:07.389084 4799 generic.go:334] "Generic (PLEG): container finished" podID="cfc335e9-4154-4713-a1b7-96f30bdab940" containerID="4a3adb7fdc20f816c07096284bfd9a60c036a62057d9e6d388541ecf1977fc60" exitCode=0
Jan 21 17:37:07 crc kubenswrapper[4799]: I0121 17:37:07.389179 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vccfc" event={"ID":"cfc335e9-4154-4713-a1b7-96f30bdab940","Type":"ContainerDied","Data":"4a3adb7fdc20f816c07096284bfd9a60c036a62057d9e6d388541ecf1977fc60"}
Jan 21 17:37:09 crc kubenswrapper[4799]: I0121 17:37:09.068819 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-96sxw"]
Jan 21 17:37:12 crc kubenswrapper[4799]: I0121 17:37:12.442345 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2z8fw" event={"ID":"3e7169e9-ed59-4259-bc63-a1079a9412c0","Type":"ContainerStarted","Data":"935a8b5753828ee263122d0b2ff212e7a23701a1f73c48e8c000014ef3012d40"}
Jan 21 17:37:12 crc kubenswrapper[4799]: I0121 17:37:12.463483 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dbbcd" event={"ID":"ad7b0f43-cc68-4c74-967f-bc61107e6d0f","Type":"ContainerStarted","Data":"2401483a3d71e989777b609c2934905e490579bc35650997898ecafd35829b23"}
Jan 21 17:37:12 crc kubenswrapper[4799]: I0121 17:37:12.472402 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hk87s" event={"ID":"2a7d46ef-dfda-4602-a004-c26ff4335788","Type":"ContainerStarted","Data":"f1a1d70df6660288056043c348864079c14a39cad6849d02f3f5266aa9777071"}
Jan 21 17:37:12 crc kubenswrapper[4799]: I0121 17:37:12.486819 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2z8fw" podStartSLOduration=7.373106342 podStartE2EDuration="1m31.486792314s" podCreationTimestamp="2026-01-21 17:35:41 +0000 UTC" firstStartedPulling="2026-01-21 17:35:47.699416389 +0000 UTC m=+174.325706412" lastFinishedPulling="2026-01-21 17:37:11.813102361 +0000 UTC m=+258.439392384" observedRunningTime="2026-01-21 17:37:12.480765694 +0000 UTC m=+259.107055717" watchObservedRunningTime="2026-01-21 17:37:12.486792314 +0000 UTC m=+259.113082337"
Jan 21 17:37:12 crc kubenswrapper[4799]: I0121 17:37:12.510063 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hk87s" podStartSLOduration=8.74889827 podStartE2EDuration="1m31.510039634s" podCreationTimestamp="2026-01-21 17:35:41 +0000 UTC" firstStartedPulling="2026-01-21 17:35:49.045960748 +0000 UTC m=+175.672250771" lastFinishedPulling="2026-01-21 17:37:11.807102112 +0000 UTC m=+258.433392135" observedRunningTime="2026-01-21 17:37:12.507146453 +0000 UTC m=+259.133436476" watchObservedRunningTime="2026-01-21 17:37:12.510039634 +0000 UTC m=+259.136329657"
Jan 21 17:37:12 crc kubenswrapper[4799]: I0121 17:37:12.570744 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2z8fw"
Jan 21 17:37:12 crc kubenswrapper[4799]: I0121 17:37:12.570833 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2z8fw"
Jan 21 17:37:13 crc kubenswrapper[4799]: I0121 17:37:13.603827 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zk5bn" event={"ID":"2145d2a2-e101-44cb-b0c4-4161fbb910f8","Type":"ContainerStarted","Data":"6cc1b442f43d9361767272990f59b2dfa84829cd516565494db84a46501a2067"}
Jan 21 17:37:13 crc kubenswrapper[4799]: I0121 17:37:13.640202 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zk5bn" podStartSLOduration=8.410437118 podStartE2EDuration="1m32.640168569s" podCreationTimestamp="2026-01-21 17:35:41 +0000 UTC" firstStartedPulling="2026-01-21 17:35:47.848848809 +0000 UTC m=+174.475138822" lastFinishedPulling="2026-01-21 17:37:12.07858025 +0000 UTC m=+258.704870273" observedRunningTime="2026-01-21 17:37:13.637713402 +0000 UTC m=+260.264003445" watchObservedRunningTime="2026-01-21 17:37:13.640168569 +0000 UTC m=+260.266458592"
Jan 21 17:37:13 crc kubenswrapper[4799]: I0121 17:37:13.647540 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8qkfv" event={"ID":"cb30842a-4bc0-4d3d-aa45-ff611e019759","Type":"ContainerStarted","Data":"5b23bfb30fa98c2cb0d232f39d79fbc7931f0f2f6a34c4332e6a750e438cd014"}
Jan 21 17:37:13 crc kubenswrapper[4799]: I0121 17:37:13.650552 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgvfc" event={"ID":"9a09e09d-8207-4727-9c4e-cea051cb063a","Type":"ContainerStarted","Data":"982414733ba23d2d420412d9f4185d3c9f0e0b79e35ff6b869ff0045b47dc2a2"}
Jan 21 17:37:13 crc kubenswrapper[4799]: I0121 17:37:13.652609 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzgjt" event={"ID":"10d51c83-0754-4e1a-a39f-de83ea48bf7b","Type":"ContainerStarted","Data":"76881bed8edd53b98da842868dca21297ec004b9841967785b4575f7b7395101"}
Jan 21 17:37:13 crc kubenswrapper[4799]: I0121 17:37:13.656158 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vccfc" event={"ID":"cfc335e9-4154-4713-a1b7-96f30bdab940","Type":"ContainerStarted","Data":"55b9815d61ead17298817e2f0805988d69be05ed02438114a47f717032235d6b"}
Jan 21 17:37:13 crc kubenswrapper[4799]: I0121 17:37:13.775225 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8qkfv" podStartSLOduration=8.526909321 podStartE2EDuration="1m32.775194329s" podCreationTimestamp="2026-01-21 17:35:41 +0000 UTC" firstStartedPulling="2026-01-21 17:35:47.716308652 +0000 UTC m=+174.342598675" lastFinishedPulling="2026-01-21 17:37:11.96459366 +0000 UTC m=+258.590883683" observedRunningTime="2026-01-21 17:37:13.759302149 +0000 UTC m=+260.385592162" watchObservedRunningTime="2026-01-21 17:37:13.775194329 +0000 UTC m=+260.401484352"
Jan 21 17:37:13 crc kubenswrapper[4799]: I0121 17:37:13.861096 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vccfc" podStartSLOduration=8.392878053 podStartE2EDuration="1m29.861066986s" podCreationTimestamp="2026-01-21 17:35:44 +0000 UTC" firstStartedPulling="2026-01-21 17:35:50.469162331 +0000 UTC m=+177.095452344" lastFinishedPulling="2026-01-21 17:37:11.937351254 +0000 UTC m=+258.563641277" observedRunningTime="2026-01-21 17:37:13.813813641 +0000 UTC m=+260.440103674" watchObservedRunningTime="2026-01-21 17:37:13.861066986 +0000 UTC m=+260.487357009"
podCreationTimestamp="2026-01-21 17:35:44 +0000 UTC" firstStartedPulling="2026-01-21 17:35:50.469162331 +0000 UTC m=+177.095452344" lastFinishedPulling="2026-01-21 17:37:11.937351254 +0000 UTC m=+258.563641277" observedRunningTime="2026-01-21 17:37:13.813813641 +0000 UTC m=+260.440103674" watchObservedRunningTime="2026-01-21 17:37:13.861066986 +0000 UTC m=+260.487357009" Jan 21 17:37:13 crc kubenswrapper[4799]: I0121 17:37:13.989311 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xgvfc" podStartSLOduration=8.982396996 podStartE2EDuration="1m30.989283004s" podCreationTimestamp="2026-01-21 17:35:43 +0000 UTC" firstStartedPulling="2026-01-21 17:35:50.474995651 +0000 UTC m=+177.101285674" lastFinishedPulling="2026-01-21 17:37:12.481881669 +0000 UTC m=+259.108171682" observedRunningTime="2026-01-21 17:37:13.888468617 +0000 UTC m=+260.514758650" watchObservedRunningTime="2026-01-21 17:37:13.989283004 +0000 UTC m=+260.615573027" Jan 21 17:37:13 crc kubenswrapper[4799]: I0121 17:37:13.989604 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rzgjt" podStartSLOduration=7.034857646 podStartE2EDuration="1m29.989599654s" podCreationTimestamp="2026-01-21 17:35:44 +0000 UTC" firstStartedPulling="2026-01-21 17:35:49.029161728 +0000 UTC m=+175.655451751" lastFinishedPulling="2026-01-21 17:37:11.983903736 +0000 UTC m=+258.610193759" observedRunningTime="2026-01-21 17:37:13.864964128 +0000 UTC m=+260.491254151" watchObservedRunningTime="2026-01-21 17:37:13.989599654 +0000 UTC m=+260.615889677" Jan 21 17:37:14 crc kubenswrapper[4799]: I0121 17:37:14.165467 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-2z8fw" podUID="3e7169e9-ed59-4259-bc63-a1079a9412c0" containerName="registry-server" probeResult="failure" output=< Jan 21 17:37:14 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Jan 21 17:37:14 crc kubenswrapper[4799]: > Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.109053 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8qkfv"] Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.109710 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8qkfv" podUID="cb30842a-4bc0-4d3d-aa45-ff611e019759" containerName="registry-server" containerID="cri-o://5b23bfb30fa98c2cb0d232f39d79fbc7931f0f2f6a34c4332e6a750e438cd014" gracePeriod=30 Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.121632 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zk5bn"] Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.121949 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zk5bn" podUID="2145d2a2-e101-44cb-b0c4-4161fbb910f8" containerName="registry-server" containerID="cri-o://6cc1b442f43d9361767272990f59b2dfa84829cd516565494db84a46501a2067" gracePeriod=30 Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.135547 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2z8fw"] Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.136212 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2z8fw" 
podUID="3e7169e9-ed59-4259-bc63-a1079a9412c0" containerName="registry-server" containerID="cri-o://935a8b5753828ee263122d0b2ff212e7a23701a1f73c48e8c000014ef3012d40" gracePeriod=30 Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.147888 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hk87s"] Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.148228 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hk87s" podUID="2a7d46ef-dfda-4602-a004-c26ff4335788" containerName="registry-server" containerID="cri-o://f1a1d70df6660288056043c348864079c14a39cad6849d02f3f5266aa9777071" gracePeriod=30 Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.158028 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2wknp"] Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.158408 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" podUID="06fabbfb-ca52-4980-9478-5fbe09bca884" containerName="marketplace-operator" containerID="cri-o://014f524f4df5adbc43364e6d3e0479200d0e879553db408ed36892c3f2122c58" gracePeriod=30 Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.168257 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dbbcd"] Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.168552 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dbbcd" podUID="ad7b0f43-cc68-4c74-967f-bc61107e6d0f" containerName="extract-content" containerID="cri-o://2401483a3d71e989777b609c2934905e490579bc35650997898ecafd35829b23" gracePeriod=30 Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.175755 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgvfc"] Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.176099 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xgvfc" podUID="9a09e09d-8207-4727-9c4e-cea051cb063a" containerName="registry-server" containerID="cri-o://982414733ba23d2d420412d9f4185d3c9f0e0b79e35ff6b869ff0045b47dc2a2" gracePeriod=30 Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.186513 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-g4vrr"] Jan 21 17:37:15 crc kubenswrapper[4799]: E0121 17:37:15.187055 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52c172fe-9eb8-4ca3-b87a-f025780d600c" containerName="pruner" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.187076 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="52c172fe-9eb8-4ca3-b87a-f025780d600c" containerName="pruner" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.187247 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="52c172fe-9eb8-4ca3-b87a-f025780d600c" containerName="pruner" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.187992 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-g4vrr" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.188234 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rzgjt"] Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.188523 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rzgjt" podUID="10d51c83-0754-4e1a-a39f-de83ea48bf7b" containerName="registry-server" containerID="cri-o://76881bed8edd53b98da842868dca21297ec004b9841967785b4575f7b7395101" gracePeriod=30 Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.204730 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-g4vrr"] Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.214215 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vccfc"] Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.214513 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vccfc" podUID="cfc335e9-4154-4713-a1b7-96f30bdab940" containerName="registry-server" containerID="cri-o://55b9815d61ead17298817e2f0805988d69be05ed02438114a47f717032235d6b" gracePeriod=30 Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.334743 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xgvfc" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.358710 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1f389163-50cd-4aaa-9b7c-82358ab47826-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-g4vrr\" (UID: \"1f389163-50cd-4aaa-9b7c-82358ab47826\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4vrr" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.358951 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2lqd\" (UniqueName: \"kubernetes.io/projected/1f389163-50cd-4aaa-9b7c-82358ab47826-kube-api-access-n2lqd\") pod \"marketplace-operator-79b997595-g4vrr\" (UID: \"1f389163-50cd-4aaa-9b7c-82358ab47826\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4vrr" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.359118 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1f389163-50cd-4aaa-9b7c-82358ab47826-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-g4vrr\" (UID: \"1f389163-50cd-4aaa-9b7c-82358ab47826\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4vrr" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.459986 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2lqd\" (UniqueName: \"kubernetes.io/projected/1f389163-50cd-4aaa-9b7c-82358ab47826-kube-api-access-n2lqd\") pod \"marketplace-operator-79b997595-g4vrr\" (UID: \"1f389163-50cd-4aaa-9b7c-82358ab47826\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4vrr" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.460074 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/1f389163-50cd-4aaa-9b7c-82358ab47826-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-g4vrr\" (UID: \"1f389163-50cd-4aaa-9b7c-82358ab47826\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4vrr" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.460140 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1f389163-50cd-4aaa-9b7c-82358ab47826-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-g4vrr\" (UID: \"1f389163-50cd-4aaa-9b7c-82358ab47826\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4vrr" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.461798 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1f389163-50cd-4aaa-9b7c-82358ab47826-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-g4vrr\" (UID: \"1f389163-50cd-4aaa-9b7c-82358ab47826\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4vrr" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.467764 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1f389163-50cd-4aaa-9b7c-82358ab47826-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-g4vrr\" (UID: \"1f389163-50cd-4aaa-9b7c-82358ab47826\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4vrr" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.480445 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2lqd\" (UniqueName: \"kubernetes.io/projected/1f389163-50cd-4aaa-9b7c-82358ab47826-kube-api-access-n2lqd\") pod \"marketplace-operator-79b997595-g4vrr\" (UID: \"1f389163-50cd-4aaa-9b7c-82358ab47826\") " pod="openshift-marketplace/marketplace-operator-79b997595-g4vrr" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.515353 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-g4vrr" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.608646 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rzgjt" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.693442 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vccfc" Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.722269 4799 generic.go:334] "Generic (PLEG): container finished" podID="ad7b0f43-cc68-4c74-967f-bc61107e6d0f" containerID="2401483a3d71e989777b609c2934905e490579bc35650997898ecafd35829b23" exitCode=0 Jan 21 17:37:15 crc kubenswrapper[4799]: I0121 17:37:15.722353 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dbbcd" event={"ID":"ad7b0f43-cc68-4c74-967f-bc61107e6d0f","Type":"ContainerDied","Data":"2401483a3d71e989777b609c2934905e490579bc35650997898ecafd35829b23"} Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.064029 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-g4vrr"] Jan 21 17:37:16 crc kubenswrapper[4799]: W0121 17:37:16.206139 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f389163_50cd_4aaa_9b7c_82358ab47826.slice/crio-87478d431a6491e88136b089517e8e42a670b41cd066d1d06eaf191005e5949f WatchSource:0}: Error finding container 87478d431a6491e88136b089517e8e42a670b41cd066d1d06eaf191005e5949f: Status 404 returned error can't find the container with id 87478d431a6491e88136b089517e8e42a670b41cd066d1d06eaf191005e5949f Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.420820 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dbbcd" Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.585503 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-catalog-content\") pod \"ad7b0f43-cc68-4c74-967f-bc61107e6d0f\" (UID: \"ad7b0f43-cc68-4c74-967f-bc61107e6d0f\") " Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.585802 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-utilities\") pod \"ad7b0f43-cc68-4c74-967f-bc61107e6d0f\" (UID: \"ad7b0f43-cc68-4c74-967f-bc61107e6d0f\") " Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.585907 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gfnzp\" (UniqueName: \"kubernetes.io/projected/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-kube-api-access-gfnzp\") pod \"ad7b0f43-cc68-4c74-967f-bc61107e6d0f\" (UID: \"ad7b0f43-cc68-4c74-967f-bc61107e6d0f\") " Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.586522 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-utilities" (OuterVolumeSpecName: "utilities") pod "ad7b0f43-cc68-4c74-967f-bc61107e6d0f" (UID: "ad7b0f43-cc68-4c74-967f-bc61107e6d0f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.592383 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-kube-api-access-gfnzp" (OuterVolumeSpecName: "kube-api-access-gfnzp") pod "ad7b0f43-cc68-4c74-967f-bc61107e6d0f" (UID: "ad7b0f43-cc68-4c74-967f-bc61107e6d0f"). InnerVolumeSpecName "kube-api-access-gfnzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.618398 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ad7b0f43-cc68-4c74-967f-bc61107e6d0f" (UID: "ad7b0f43-cc68-4c74-967f-bc61107e6d0f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.687220 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.687259 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gfnzp\" (UniqueName: \"kubernetes.io/projected/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-kube-api-access-gfnzp\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.687271 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad7b0f43-cc68-4c74-967f-bc61107e6d0f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.732711 4799 generic.go:334] "Generic (PLEG): container finished" podID="06fabbfb-ca52-4980-9478-5fbe09bca884" containerID="014f524f4df5adbc43364e6d3e0479200d0e879553db408ed36892c3f2122c58" exitCode=0 Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.732806 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" event={"ID":"06fabbfb-ca52-4980-9478-5fbe09bca884","Type":"ContainerDied","Data":"014f524f4df5adbc43364e6d3e0479200d0e879553db408ed36892c3f2122c58"} Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.735356 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dbbcd" event={"ID":"ad7b0f43-cc68-4c74-967f-bc61107e6d0f","Type":"ContainerDied","Data":"0be8e626636345a4e29e70cc2bcd1fe9ea856fe37160d0d16d75a247031f4acb"} Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.735442 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dbbcd" Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.735453 4799 scope.go:117] "RemoveContainer" containerID="2401483a3d71e989777b609c2934905e490579bc35650997898ecafd35829b23" Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.736813 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-g4vrr" event={"ID":"1f389163-50cd-4aaa-9b7c-82358ab47826","Type":"ContainerStarted","Data":"87478d431a6491e88136b089517e8e42a670b41cd066d1d06eaf191005e5949f"} Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.739682 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hk87s_2a7d46ef-dfda-4602-a004-c26ff4335788/registry-server/0.log" Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.740614 4799 generic.go:334] "Generic (PLEG): container finished" podID="2a7d46ef-dfda-4602-a004-c26ff4335788" containerID="f1a1d70df6660288056043c348864079c14a39cad6849d02f3f5266aa9777071" exitCode=1 Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.740679 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hk87s" event={"ID":"2a7d46ef-dfda-4602-a004-c26ff4335788","Type":"ContainerDied","Data":"f1a1d70df6660288056043c348864079c14a39cad6849d02f3f5266aa9777071"} Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.742962 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2z8fw_3e7169e9-ed59-4259-bc63-a1079a9412c0/registry-server/0.log" Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.743743 4799 generic.go:334] "Generic (PLEG): container finished" podID="3e7169e9-ed59-4259-bc63-a1079a9412c0" containerID="935a8b5753828ee263122d0b2ff212e7a23701a1f73c48e8c000014ef3012d40" exitCode=1 Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.743780 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2z8fw" event={"ID":"3e7169e9-ed59-4259-bc63-a1079a9412c0","Type":"ContainerDied","Data":"935a8b5753828ee263122d0b2ff212e7a23701a1f73c48e8c000014ef3012d40"} Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.745423 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zk5bn_2145d2a2-e101-44cb-b0c4-4161fbb910f8/registry-server/0.log" Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.746101 4799 generic.go:334] "Generic (PLEG): container finished" podID="2145d2a2-e101-44cb-b0c4-4161fbb910f8" containerID="6cc1b442f43d9361767272990f59b2dfa84829cd516565494db84a46501a2067" exitCode=1 Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.746173 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zk5bn" event={"ID":"2145d2a2-e101-44cb-b0c4-4161fbb910f8","Type":"ContainerDied","Data":"6cc1b442f43d9361767272990f59b2dfa84829cd516565494db84a46501a2067"} Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.747295 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8qkfv_cb30842a-4bc0-4d3d-aa45-ff611e019759/registry-server/0.log" Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.747860 4799 generic.go:334] "Generic (PLEG): container finished" podID="cb30842a-4bc0-4d3d-aa45-ff611e019759" containerID="5b23bfb30fa98c2cb0d232f39d79fbc7931f0f2f6a34c4332e6a750e438cd014" exitCode=1 Jan 21 17:37:16 crc 
kubenswrapper[4799]: I0121 17:37:16.747898 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8qkfv" event={"ID":"cb30842a-4bc0-4d3d-aa45-ff611e019759","Type":"ContainerDied","Data":"5b23bfb30fa98c2cb0d232f39d79fbc7931f0f2f6a34c4332e6a750e438cd014"} Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.749446 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgvfc_9a09e09d-8207-4727-9c4e-cea051cb063a/registry-server/0.log" Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.750169 4799 generic.go:334] "Generic (PLEG): container finished" podID="9a09e09d-8207-4727-9c4e-cea051cb063a" containerID="982414733ba23d2d420412d9f4185d3c9f0e0b79e35ff6b869ff0045b47dc2a2" exitCode=1 Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.750193 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgvfc" event={"ID":"9a09e09d-8207-4727-9c4e-cea051cb063a","Type":"ContainerDied","Data":"982414733ba23d2d420412d9f4185d3c9f0e0b79e35ff6b869ff0045b47dc2a2"} Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.757284 4799 scope.go:117] "RemoveContainer" containerID="63d3065eb6810eddd864256c0f40e8859b5a91834acd8e5e0feafba402ff2087" Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.791551 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dbbcd"] Jan 21 17:37:16 crc kubenswrapper[4799]: I0121 17:37:16.794294 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dbbcd"] Jan 21 17:37:17 crc kubenswrapper[4799]: I0121 17:37:17.765240 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-g4vrr" event={"ID":"1f389163-50cd-4aaa-9b7c-82358ab47826","Type":"ContainerStarted","Data":"abf47274832e2324555c954e46a6cbfe53a24c5151d777c61daeed3fa213522b"} Jan 21 17:37:17 crc kubenswrapper[4799]: I0121 17:37:17.767883 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rzgjt_10d51c83-0754-4e1a-a39f-de83ea48bf7b/registry-server/0.log" Jan 21 17:37:17 crc kubenswrapper[4799]: I0121 17:37:17.769818 4799 generic.go:334] "Generic (PLEG): container finished" podID="10d51c83-0754-4e1a-a39f-de83ea48bf7b" containerID="76881bed8edd53b98da842868dca21297ec004b9841967785b4575f7b7395101" exitCode=1 Jan 21 17:37:17 crc kubenswrapper[4799]: I0121 17:37:17.769922 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzgjt" event={"ID":"10d51c83-0754-4e1a-a39f-de83ea48bf7b","Type":"ContainerDied","Data":"76881bed8edd53b98da842868dca21297ec004b9841967785b4575f7b7395101"} Jan 21 17:37:17 crc kubenswrapper[4799]: I0121 17:37:17.773685 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vccfc_cfc335e9-4154-4713-a1b7-96f30bdab940/registry-server/0.log" Jan 21 17:37:17 crc kubenswrapper[4799]: I0121 17:37:17.774810 4799 generic.go:334] "Generic (PLEG): container finished" podID="cfc335e9-4154-4713-a1b7-96f30bdab940" containerID="55b9815d61ead17298817e2f0805988d69be05ed02438114a47f717032235d6b" exitCode=1 Jan 21 17:37:17 crc kubenswrapper[4799]: I0121 17:37:17.774886 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vccfc" 
event={"ID":"cfc335e9-4154-4713-a1b7-96f30bdab940","Type":"ContainerDied","Data":"55b9815d61ead17298817e2f0805988d69be05ed02438114a47f717032235d6b"} Jan 21 17:37:17 crc kubenswrapper[4799]: I0121 17:37:17.883000 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hk87s_2a7d46ef-dfda-4602-a004-c26ff4335788/registry-server/0.log" Jan 21 17:37:17 crc kubenswrapper[4799]: I0121 17:37:17.884735 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hk87s" Jan 21 17:37:17 crc kubenswrapper[4799]: I0121 17:37:17.982287 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8qkfv_cb30842a-4bc0-4d3d-aa45-ff611e019759/registry-server/0.log" Jan 21 17:37:17 crc kubenswrapper[4799]: I0121 17:37:17.984012 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8qkfv" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.015435 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tw4nh\" (UniqueName: \"kubernetes.io/projected/2a7d46ef-dfda-4602-a004-c26ff4335788-kube-api-access-tw4nh\") pod \"2a7d46ef-dfda-4602-a004-c26ff4335788\" (UID: \"2a7d46ef-dfda-4602-a004-c26ff4335788\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.015721 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a7d46ef-dfda-4602-a004-c26ff4335788-catalog-content\") pod \"2a7d46ef-dfda-4602-a004-c26ff4335788\" (UID: \"2a7d46ef-dfda-4602-a004-c26ff4335788\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.016584 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a7d46ef-dfda-4602-a004-c26ff4335788-utilities\") pod \"2a7d46ef-dfda-4602-a004-c26ff4335788\" (UID: \"2a7d46ef-dfda-4602-a004-c26ff4335788\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.018917 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a7d46ef-dfda-4602-a004-c26ff4335788-utilities" (OuterVolumeSpecName: "utilities") pod "2a7d46ef-dfda-4602-a004-c26ff4335788" (UID: "2a7d46ef-dfda-4602-a004-c26ff4335788"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.066335 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a7d46ef-dfda-4602-a004-c26ff4335788-kube-api-access-tw4nh" (OuterVolumeSpecName: "kube-api-access-tw4nh") pod "2a7d46ef-dfda-4602-a004-c26ff4335788" (UID: "2a7d46ef-dfda-4602-a004-c26ff4335788"). InnerVolumeSpecName "kube-api-access-tw4nh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.092147 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgvfc_9a09e09d-8207-4727-9c4e-cea051cb063a/registry-server/0.log" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.094173 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xgvfc" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.119219 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jgws\" (UniqueName: \"kubernetes.io/projected/cb30842a-4bc0-4d3d-aa45-ff611e019759-kube-api-access-7jgws\") pod \"cb30842a-4bc0-4d3d-aa45-ff611e019759\" (UID: \"cb30842a-4bc0-4d3d-aa45-ff611e019759\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.119294 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb30842a-4bc0-4d3d-aa45-ff611e019759-catalog-content\") pod \"cb30842a-4bc0-4d3d-aa45-ff611e019759\" (UID: \"cb30842a-4bc0-4d3d-aa45-ff611e019759\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.119370 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb30842a-4bc0-4d3d-aa45-ff611e019759-utilities\") pod \"cb30842a-4bc0-4d3d-aa45-ff611e019759\" (UID: \"cb30842a-4bc0-4d3d-aa45-ff611e019759\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.119878 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tw4nh\" (UniqueName: \"kubernetes.io/projected/2a7d46ef-dfda-4602-a004-c26ff4335788-kube-api-access-tw4nh\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.119917 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a7d46ef-dfda-4602-a004-c26ff4335788-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.121208 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zk5bn_2145d2a2-e101-44cb-b0c4-4161fbb910f8/registry-server/0.log" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.121394 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb30842a-4bc0-4d3d-aa45-ff611e019759-utilities" (OuterVolumeSpecName: "utilities") pod "cb30842a-4bc0-4d3d-aa45-ff611e019759" (UID: "cb30842a-4bc0-4d3d-aa45-ff611e019759"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.122355 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zk5bn" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.130893 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb30842a-4bc0-4d3d-aa45-ff611e019759-kube-api-access-7jgws" (OuterVolumeSpecName: "kube-api-access-7jgws") pod "cb30842a-4bc0-4d3d-aa45-ff611e019759" (UID: "cb30842a-4bc0-4d3d-aa45-ff611e019759"). InnerVolumeSpecName "kube-api-access-7jgws". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.170982 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.178224 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2z8fw_3e7169e9-ed59-4259-bc63-a1079a9412c0/registry-server/0.log" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.179378 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2z8fw" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.191346 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb30842a-4bc0-4d3d-aa45-ff611e019759-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cb30842a-4bc0-4d3d-aa45-ff611e019759" (UID: "cb30842a-4bc0-4d3d-aa45-ff611e019759"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.223066 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ktbc\" (UniqueName: \"kubernetes.io/projected/2145d2a2-e101-44cb-b0c4-4161fbb910f8-kube-api-access-5ktbc\") pod \"2145d2a2-e101-44cb-b0c4-4161fbb910f8\" (UID: \"2145d2a2-e101-44cb-b0c4-4161fbb910f8\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.223177 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7sps4\" (UniqueName: \"kubernetes.io/projected/9a09e09d-8207-4727-9c4e-cea051cb063a-kube-api-access-7sps4\") pod \"9a09e09d-8207-4727-9c4e-cea051cb063a\" (UID: \"9a09e09d-8207-4727-9c4e-cea051cb063a\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.223202 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a09e09d-8207-4727-9c4e-cea051cb063a-utilities\") pod \"9a09e09d-8207-4727-9c4e-cea051cb063a\" (UID: \"9a09e09d-8207-4727-9c4e-cea051cb063a\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.223242 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2145d2a2-e101-44cb-b0c4-4161fbb910f8-catalog-content\") pod \"2145d2a2-e101-44cb-b0c4-4161fbb910f8\" (UID: \"2145d2a2-e101-44cb-b0c4-4161fbb910f8\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.223270 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a09e09d-8207-4727-9c4e-cea051cb063a-catalog-content\") pod \"9a09e09d-8207-4727-9c4e-cea051cb063a\" (UID: \"9a09e09d-8207-4727-9c4e-cea051cb063a\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.223299 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2145d2a2-e101-44cb-b0c4-4161fbb910f8-utilities\") pod \"2145d2a2-e101-44cb-b0c4-4161fbb910f8\" (UID: \"2145d2a2-e101-44cb-b0c4-4161fbb910f8\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.223349 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad7b0f43-cc68-4c74-967f-bc61107e6d0f" path="/var/lib/kubelet/pods/ad7b0f43-cc68-4c74-967f-bc61107e6d0f/volumes" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.223601 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jgws\" (UniqueName: 
\"kubernetes.io/projected/cb30842a-4bc0-4d3d-aa45-ff611e019759-kube-api-access-7jgws\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.223622 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb30842a-4bc0-4d3d-aa45-ff611e019759-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.223635 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb30842a-4bc0-4d3d-aa45-ff611e019759-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.224549 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a09e09d-8207-4727-9c4e-cea051cb063a-utilities" (OuterVolumeSpecName: "utilities") pod "9a09e09d-8207-4727-9c4e-cea051cb063a" (UID: "9a09e09d-8207-4727-9c4e-cea051cb063a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.224583 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2145d2a2-e101-44cb-b0c4-4161fbb910f8-utilities" (OuterVolumeSpecName: "utilities") pod "2145d2a2-e101-44cb-b0c4-4161fbb910f8" (UID: "2145d2a2-e101-44cb-b0c4-4161fbb910f8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.229417 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a09e09d-8207-4727-9c4e-cea051cb063a-kube-api-access-7sps4" (OuterVolumeSpecName: "kube-api-access-7sps4") pod "9a09e09d-8207-4727-9c4e-cea051cb063a" (UID: "9a09e09d-8207-4727-9c4e-cea051cb063a"). InnerVolumeSpecName "kube-api-access-7sps4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.229828 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2145d2a2-e101-44cb-b0c4-4161fbb910f8-kube-api-access-5ktbc" (OuterVolumeSpecName: "kube-api-access-5ktbc") pod "2145d2a2-e101-44cb-b0c4-4161fbb910f8" (UID: "2145d2a2-e101-44cb-b0c4-4161fbb910f8"). InnerVolumeSpecName "kube-api-access-5ktbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.265604 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a09e09d-8207-4727-9c4e-cea051cb063a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9a09e09d-8207-4727-9c4e-cea051cb063a" (UID: "9a09e09d-8207-4727-9c4e-cea051cb063a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.287365 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rzgjt_10d51c83-0754-4e1a-a39f-de83ea48bf7b/registry-server/0.log" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.290191 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rzgjt" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.324435 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2145d2a2-e101-44cb-b0c4-4161fbb910f8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2145d2a2-e101-44cb-b0c4-4161fbb910f8" (UID: "2145d2a2-e101-44cb-b0c4-4161fbb910f8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.325353 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e7169e9-ed59-4259-bc63-a1079a9412c0-catalog-content\") pod \"3e7169e9-ed59-4259-bc63-a1079a9412c0\" (UID: \"3e7169e9-ed59-4259-bc63-a1079a9412c0\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.326603 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9c6v\" (UniqueName: \"kubernetes.io/projected/3e7169e9-ed59-4259-bc63-a1079a9412c0-kube-api-access-v9c6v\") pod \"3e7169e9-ed59-4259-bc63-a1079a9412c0\" (UID: \"3e7169e9-ed59-4259-bc63-a1079a9412c0\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.326658 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-484w2\" (UniqueName: \"kubernetes.io/projected/06fabbfb-ca52-4980-9478-5fbe09bca884-kube-api-access-484w2\") pod \"06fabbfb-ca52-4980-9478-5fbe09bca884\" (UID: \"06fabbfb-ca52-4980-9478-5fbe09bca884\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.326701 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/06fabbfb-ca52-4980-9478-5fbe09bca884-marketplace-trusted-ca\") pod \"06fabbfb-ca52-4980-9478-5fbe09bca884\" (UID: \"06fabbfb-ca52-4980-9478-5fbe09bca884\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.326776 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e7169e9-ed59-4259-bc63-a1079a9412c0-utilities\") pod \"3e7169e9-ed59-4259-bc63-a1079a9412c0\" (UID: \"3e7169e9-ed59-4259-bc63-a1079a9412c0\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.326836 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/06fabbfb-ca52-4980-9478-5fbe09bca884-marketplace-operator-metrics\") pod \"06fabbfb-ca52-4980-9478-5fbe09bca884\" (UID: \"06fabbfb-ca52-4980-9478-5fbe09bca884\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.327494 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7sps4\" (UniqueName: \"kubernetes.io/projected/9a09e09d-8207-4727-9c4e-cea051cb063a-kube-api-access-7sps4\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.327520 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a09e09d-8207-4727-9c4e-cea051cb063a-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.327533 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2145d2a2-e101-44cb-b0c4-4161fbb910f8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc 
kubenswrapper[4799]: I0121 17:37:18.327551 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a09e09d-8207-4727-9c4e-cea051cb063a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.327563 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2145d2a2-e101-44cb-b0c4-4161fbb910f8-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.327575 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ktbc\" (UniqueName: \"kubernetes.io/projected/2145d2a2-e101-44cb-b0c4-4161fbb910f8-kube-api-access-5ktbc\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.332451 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06fabbfb-ca52-4980-9478-5fbe09bca884-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "06fabbfb-ca52-4980-9478-5fbe09bca884" (UID: "06fabbfb-ca52-4980-9478-5fbe09bca884"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.334205 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e7169e9-ed59-4259-bc63-a1079a9412c0-utilities" (OuterVolumeSpecName: "utilities") pod "3e7169e9-ed59-4259-bc63-a1079a9412c0" (UID: "3e7169e9-ed59-4259-bc63-a1079a9412c0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.336219 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06fabbfb-ca52-4980-9478-5fbe09bca884-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "06fabbfb-ca52-4980-9478-5fbe09bca884" (UID: "06fabbfb-ca52-4980-9478-5fbe09bca884"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.337382 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06fabbfb-ca52-4980-9478-5fbe09bca884-kube-api-access-484w2" (OuterVolumeSpecName: "kube-api-access-484w2") pod "06fabbfb-ca52-4980-9478-5fbe09bca884" (UID: "06fabbfb-ca52-4980-9478-5fbe09bca884"). InnerVolumeSpecName "kube-api-access-484w2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.348863 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e7169e9-ed59-4259-bc63-a1079a9412c0-kube-api-access-v9c6v" (OuterVolumeSpecName: "kube-api-access-v9c6v") pod "3e7169e9-ed59-4259-bc63-a1079a9412c0" (UID: "3e7169e9-ed59-4259-bc63-a1079a9412c0"). InnerVolumeSpecName "kube-api-access-v9c6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.388231 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e7169e9-ed59-4259-bc63-a1079a9412c0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3e7169e9-ed59-4259-bc63-a1079a9412c0" (UID: "3e7169e9-ed59-4259-bc63-a1079a9412c0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.428738 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zls6\" (UniqueName: \"kubernetes.io/projected/10d51c83-0754-4e1a-a39f-de83ea48bf7b-kube-api-access-4zls6\") pod \"10d51c83-0754-4e1a-a39f-de83ea48bf7b\" (UID: \"10d51c83-0754-4e1a-a39f-de83ea48bf7b\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.428836 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10d51c83-0754-4e1a-a39f-de83ea48bf7b-catalog-content\") pod \"10d51c83-0754-4e1a-a39f-de83ea48bf7b\" (UID: \"10d51c83-0754-4e1a-a39f-de83ea48bf7b\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.428994 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10d51c83-0754-4e1a-a39f-de83ea48bf7b-utilities\") pod \"10d51c83-0754-4e1a-a39f-de83ea48bf7b\" (UID: \"10d51c83-0754-4e1a-a39f-de83ea48bf7b\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.429296 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e7169e9-ed59-4259-bc63-a1079a9412c0-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.429317 4799 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/06fabbfb-ca52-4980-9478-5fbe09bca884-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.429328 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e7169e9-ed59-4259-bc63-a1079a9412c0-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.429338 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9c6v\" (UniqueName: \"kubernetes.io/projected/3e7169e9-ed59-4259-bc63-a1079a9412c0-kube-api-access-v9c6v\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.429364 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-484w2\" (UniqueName: \"kubernetes.io/projected/06fabbfb-ca52-4980-9478-5fbe09bca884-kube-api-access-484w2\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.429373 4799 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/06fabbfb-ca52-4980-9478-5fbe09bca884-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.430383 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10d51c83-0754-4e1a-a39f-de83ea48bf7b-utilities" (OuterVolumeSpecName: "utilities") pod "10d51c83-0754-4e1a-a39f-de83ea48bf7b" (UID: "10d51c83-0754-4e1a-a39f-de83ea48bf7b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.433387 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10d51c83-0754-4e1a-a39f-de83ea48bf7b-kube-api-access-4zls6" (OuterVolumeSpecName: "kube-api-access-4zls6") pod "10d51c83-0754-4e1a-a39f-de83ea48bf7b" (UID: "10d51c83-0754-4e1a-a39f-de83ea48bf7b"). InnerVolumeSpecName "kube-api-access-4zls6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.531509 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10d51c83-0754-4e1a-a39f-de83ea48bf7b-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.531555 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zls6\" (UniqueName: \"kubernetes.io/projected/10d51c83-0754-4e1a-a39f-de83ea48bf7b-kube-api-access-4zls6\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.625521 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a7d46ef-dfda-4602-a004-c26ff4335788-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2a7d46ef-dfda-4602-a004-c26ff4335788" (UID: "2a7d46ef-dfda-4602-a004-c26ff4335788"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.632869 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a7d46ef-dfda-4602-a004-c26ff4335788-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.654391 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vccfc_cfc335e9-4154-4713-a1b7-96f30bdab940/registry-server/0.log" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.655482 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vccfc" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.734270 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhd5x\" (UniqueName: \"kubernetes.io/projected/cfc335e9-4154-4713-a1b7-96f30bdab940-kube-api-access-rhd5x\") pod \"cfc335e9-4154-4713-a1b7-96f30bdab940\" (UID: \"cfc335e9-4154-4713-a1b7-96f30bdab940\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.734398 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfc335e9-4154-4713-a1b7-96f30bdab940-utilities\") pod \"cfc335e9-4154-4713-a1b7-96f30bdab940\" (UID: \"cfc335e9-4154-4713-a1b7-96f30bdab940\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.734431 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfc335e9-4154-4713-a1b7-96f30bdab940-catalog-content\") pod \"cfc335e9-4154-4713-a1b7-96f30bdab940\" (UID: \"cfc335e9-4154-4713-a1b7-96f30bdab940\") " Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.735301 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfc335e9-4154-4713-a1b7-96f30bdab940-utilities" (OuterVolumeSpecName: "utilities") pod "cfc335e9-4154-4713-a1b7-96f30bdab940" (UID: "cfc335e9-4154-4713-a1b7-96f30bdab940"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.737642 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfc335e9-4154-4713-a1b7-96f30bdab940-kube-api-access-rhd5x" (OuterVolumeSpecName: "kube-api-access-rhd5x") pod "cfc335e9-4154-4713-a1b7-96f30bdab940" (UID: "cfc335e9-4154-4713-a1b7-96f30bdab940"). InnerVolumeSpecName "kube-api-access-rhd5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.785121 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8qkfv_cb30842a-4bc0-4d3d-aa45-ff611e019759/registry-server/0.log" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.786097 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8qkfv" event={"ID":"cb30842a-4bc0-4d3d-aa45-ff611e019759","Type":"ContainerDied","Data":"e49c154cff1de0755c12ed36122c3e77e41b4af40ce5eb02370385891bf40c96"} Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.786174 4799 scope.go:117] "RemoveContainer" containerID="5b23bfb30fa98c2cb0d232f39d79fbc7931f0f2f6a34c4332e6a750e438cd014" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.786185 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8qkfv" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.789221 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgvfc_9a09e09d-8207-4727-9c4e-cea051cb063a/registry-server/0.log" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.790151 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgvfc" event={"ID":"9a09e09d-8207-4727-9c4e-cea051cb063a","Type":"ContainerDied","Data":"da17ceb37134c3bed3b69a22f8fca7eb4e4866d05848c981d16827426c18bead"} Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.790248 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xgvfc" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.793895 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" event={"ID":"06fabbfb-ca52-4980-9478-5fbe09bca884","Type":"ContainerDied","Data":"cc6f950e915aa62fac121f5f4fc749c772fad01d63ed2ee3bb06a4067cc35f46"} Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.794177 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2wknp" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.798692 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rzgjt_10d51c83-0754-4e1a-a39f-de83ea48bf7b/registry-server/0.log" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.799996 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzgjt" event={"ID":"10d51c83-0754-4e1a-a39f-de83ea48bf7b","Type":"ContainerDied","Data":"d40327d1b0df9a0f34d6298c7f771426310bc379af405be478cdfd5321a1bb0f"} Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.800147 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rzgjt" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.804146 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hk87s_2a7d46ef-dfda-4602-a004-c26ff4335788/registry-server/0.log" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.805713 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hk87s" event={"ID":"2a7d46ef-dfda-4602-a004-c26ff4335788","Type":"ContainerDied","Data":"c58e2c5986ecd33cdec580f18688d5303c79da5b87b6317a778ef1d45b5297db"} Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.805733 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hk87s" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.808111 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vccfc_cfc335e9-4154-4713-a1b7-96f30bdab940/registry-server/0.log" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.810774 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vccfc" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.811033 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vccfc" event={"ID":"cfc335e9-4154-4713-a1b7-96f30bdab940","Type":"ContainerDied","Data":"f6ccf8a5397af8494e8f7132dd8d718728e070b0cc081623749ed5ff986c4237"} Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.813791 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2z8fw_3e7169e9-ed59-4259-bc63-a1079a9412c0/registry-server/0.log" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.815049 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2z8fw" event={"ID":"3e7169e9-ed59-4259-bc63-a1079a9412c0","Type":"ContainerDied","Data":"08f2d23a460bf45492f7e4e10bfbca1c192a0f5dca16997bcb391235ead0a89c"} Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.815213 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2z8fw" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.826639 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zk5bn_2145d2a2-e101-44cb-b0c4-4161fbb910f8/registry-server/0.log" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.828221 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zk5bn" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.829048 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zk5bn" event={"ID":"2145d2a2-e101-44cb-b0c4-4161fbb910f8","Type":"ContainerDied","Data":"360e8e916da6cf0414e65b875187ecaa9eb6fff4a48dc47ad89861d1a1ea582f"} Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.829104 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-g4vrr" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.838720 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhd5x\" (UniqueName: \"kubernetes.io/projected/cfc335e9-4154-4713-a1b7-96f30bdab940-kube-api-access-rhd5x\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.838762 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cfc335e9-4154-4713-a1b7-96f30bdab940-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.838807 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8qkfv"] Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.839175 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-g4vrr" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.847485 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-f9tnv"] Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848153 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2145d2a2-e101-44cb-b0c4-4161fbb910f8" containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848176 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2145d2a2-e101-44cb-b0c4-4161fbb910f8" 
containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848192 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10d51c83-0754-4e1a-a39f-de83ea48bf7b" containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848203 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="10d51c83-0754-4e1a-a39f-de83ea48bf7b" containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848213 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad7b0f43-cc68-4c74-967f-bc61107e6d0f" containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848220 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad7b0f43-cc68-4c74-967f-bc61107e6d0f" containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848234 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a09e09d-8207-4727-9c4e-cea051cb063a" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848243 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a09e09d-8207-4727-9c4e-cea051cb063a" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848253 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a7d46ef-dfda-4602-a004-c26ff4335788" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848261 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a7d46ef-dfda-4602-a004-c26ff4335788" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848272 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfc335e9-4154-4713-a1b7-96f30bdab940" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848281 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfc335e9-4154-4713-a1b7-96f30bdab940" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848296 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2145d2a2-e101-44cb-b0c4-4161fbb910f8" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848303 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2145d2a2-e101-44cb-b0c4-4161fbb910f8" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848311 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e7169e9-ed59-4259-bc63-a1079a9412c0" containerName="extract-utilities" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848317 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e7169e9-ed59-4259-bc63-a1079a9412c0" containerName="extract-utilities" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848327 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb30842a-4bc0-4d3d-aa45-ff611e019759" containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848333 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb30842a-4bc0-4d3d-aa45-ff611e019759" containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848341 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06fabbfb-ca52-4980-9478-5fbe09bca884" containerName="marketplace-operator" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848348 4799 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="06fabbfb-ca52-4980-9478-5fbe09bca884" containerName="marketplace-operator" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848357 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10d51c83-0754-4e1a-a39f-de83ea48bf7b" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848365 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="10d51c83-0754-4e1a-a39f-de83ea48bf7b" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848372 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a7d46ef-dfda-4602-a004-c26ff4335788" containerName="extract-utilities" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848379 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a7d46ef-dfda-4602-a004-c26ff4335788" containerName="extract-utilities" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848386 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfc335e9-4154-4713-a1b7-96f30bdab940" containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848392 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfc335e9-4154-4713-a1b7-96f30bdab940" containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848402 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e7169e9-ed59-4259-bc63-a1079a9412c0" containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848408 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e7169e9-ed59-4259-bc63-a1079a9412c0" containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848417 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb30842a-4bc0-4d3d-aa45-ff611e019759" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848425 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb30842a-4bc0-4d3d-aa45-ff611e019759" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848435 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2145d2a2-e101-44cb-b0c4-4161fbb910f8" containerName="extract-utilities" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848441 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2145d2a2-e101-44cb-b0c4-4161fbb910f8" containerName="extract-utilities" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848449 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e7169e9-ed59-4259-bc63-a1079a9412c0" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848455 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e7169e9-ed59-4259-bc63-a1079a9412c0" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848461 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb30842a-4bc0-4d3d-aa45-ff611e019759" containerName="extract-utilities" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848467 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb30842a-4bc0-4d3d-aa45-ff611e019759" containerName="extract-utilities" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848473 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a7d46ef-dfda-4602-a004-c26ff4335788" containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848480 4799 
state_mem.go:107] "Deleted CPUSet assignment" podUID="2a7d46ef-dfda-4602-a004-c26ff4335788" containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848490 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad7b0f43-cc68-4c74-967f-bc61107e6d0f" containerName="extract-utilities" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848496 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad7b0f43-cc68-4c74-967f-bc61107e6d0f" containerName="extract-utilities" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848508 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10d51c83-0754-4e1a-a39f-de83ea48bf7b" containerName="extract-utilities" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848514 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="10d51c83-0754-4e1a-a39f-de83ea48bf7b" containerName="extract-utilities" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848522 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfc335e9-4154-4713-a1b7-96f30bdab940" containerName="extract-utilities" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848530 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfc335e9-4154-4713-a1b7-96f30bdab940" containerName="extract-utilities" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848549 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a09e09d-8207-4727-9c4e-cea051cb063a" containerName="extract-utilities" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848556 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a09e09d-8207-4727-9c4e-cea051cb063a" containerName="extract-utilities" Jan 21 17:37:18 crc kubenswrapper[4799]: E0121 17:37:18.848565 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a09e09d-8207-4727-9c4e-cea051cb063a" containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848573 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a09e09d-8207-4727-9c4e-cea051cb063a" containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848685 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="06fabbfb-ca52-4980-9478-5fbe09bca884" containerName="marketplace-operator" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848697 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a09e09d-8207-4727-9c4e-cea051cb063a" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848706 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="10d51c83-0754-4e1a-a39f-de83ea48bf7b" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848717 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a7d46ef-dfda-4602-a004-c26ff4335788" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848729 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e7169e9-ed59-4259-bc63-a1079a9412c0" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848737 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2145d2a2-e101-44cb-b0c4-4161fbb910f8" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848744 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfc335e9-4154-4713-a1b7-96f30bdab940" containerName="registry-server" Jan 
21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848752 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb30842a-4bc0-4d3d-aa45-ff611e019759" containerName="registry-server" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.848760 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad7b0f43-cc68-4c74-967f-bc61107e6d0f" containerName="extract-content" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.849794 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f9tnv" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.851456 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.856504 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8qkfv"] Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.861064 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f9tnv"] Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.863783 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgvfc"] Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.866212 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgvfc"] Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.869061 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2wknp"] Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.872155 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2wknp"] Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.915072 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-g4vrr" podStartSLOduration=3.915047145 podStartE2EDuration="3.915047145s" podCreationTimestamp="2026-01-21 17:37:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:37:18.893720485 +0000 UTC m=+265.520010508" watchObservedRunningTime="2026-01-21 17:37:18.915047145 +0000 UTC m=+265.541337168" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.917245 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2z8fw"] Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.921093 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2z8fw"] Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.940348 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cb24916-faef-4a1c-8e2c-c51d108d915e-catalog-content\") pod \"redhat-marketplace-f9tnv\" (UID: \"5cb24916-faef-4a1c-8e2c-c51d108d915e\") " pod="openshift-marketplace/redhat-marketplace-f9tnv" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.940439 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cb24916-faef-4a1c-8e2c-c51d108d915e-utilities\") pod \"redhat-marketplace-f9tnv\" (UID: \"5cb24916-faef-4a1c-8e2c-c51d108d915e\") " 
pod="openshift-marketplace/redhat-marketplace-f9tnv" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.940549 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqh8v\" (UniqueName: \"kubernetes.io/projected/5cb24916-faef-4a1c-8e2c-c51d108d915e-kube-api-access-lqh8v\") pod \"redhat-marketplace-f9tnv\" (UID: \"5cb24916-faef-4a1c-8e2c-c51d108d915e\") " pod="openshift-marketplace/redhat-marketplace-f9tnv" Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.951206 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hk87s"] Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.955710 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hk87s"] Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.969410 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zk5bn"] Jan 21 17:37:18 crc kubenswrapper[4799]: I0121 17:37:18.972685 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zk5bn"] Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.041733 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqh8v\" (UniqueName: \"kubernetes.io/projected/5cb24916-faef-4a1c-8e2c-c51d108d915e-kube-api-access-lqh8v\") pod \"redhat-marketplace-f9tnv\" (UID: \"5cb24916-faef-4a1c-8e2c-c51d108d915e\") " pod="openshift-marketplace/redhat-marketplace-f9tnv" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.041861 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cb24916-faef-4a1c-8e2c-c51d108d915e-catalog-content\") pod \"redhat-marketplace-f9tnv\" (UID: \"5cb24916-faef-4a1c-8e2c-c51d108d915e\") " pod="openshift-marketplace/redhat-marketplace-f9tnv" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.042455 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cb24916-faef-4a1c-8e2c-c51d108d915e-catalog-content\") pod \"redhat-marketplace-f9tnv\" (UID: \"5cb24916-faef-4a1c-8e2c-c51d108d915e\") " pod="openshift-marketplace/redhat-marketplace-f9tnv" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.042599 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cb24916-faef-4a1c-8e2c-c51d108d915e-utilities\") pod \"redhat-marketplace-f9tnv\" (UID: \"5cb24916-faef-4a1c-8e2c-c51d108d915e\") " pod="openshift-marketplace/redhat-marketplace-f9tnv" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.042897 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cb24916-faef-4a1c-8e2c-c51d108d915e-utilities\") pod \"redhat-marketplace-f9tnv\" (UID: \"5cb24916-faef-4a1c-8e2c-c51d108d915e\") " pod="openshift-marketplace/redhat-marketplace-f9tnv" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.065165 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10d51c83-0754-4e1a-a39f-de83ea48bf7b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "10d51c83-0754-4e1a-a39f-de83ea48bf7b" (UID: "10d51c83-0754-4e1a-a39f-de83ea48bf7b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.065520 4799 scope.go:117] "RemoveContainer" containerID="2889ffafe54d84b1bd3b627bbe9e22685eb8dcfb52fe9d59055fac566921c971" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.069864 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqh8v\" (UniqueName: \"kubernetes.io/projected/5cb24916-faef-4a1c-8e2c-c51d108d915e-kube-api-access-lqh8v\") pod \"redhat-marketplace-f9tnv\" (UID: \"5cb24916-faef-4a1c-8e2c-c51d108d915e\") " pod="openshift-marketplace/redhat-marketplace-f9tnv" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.084515 4799 scope.go:117] "RemoveContainer" containerID="63fef1d3497f2846f928ab9c25141b3f4880b8afb782398ea0220eb06e79e4f3" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.103022 4799 scope.go:117] "RemoveContainer" containerID="982414733ba23d2d420412d9f4185d3c9f0e0b79e35ff6b869ff0045b47dc2a2" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.128178 4799 scope.go:117] "RemoveContainer" containerID="f69aeff0b61e7bbe316c67315621d7aa5577f8f83067d002fd28a348acc522c7" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.135640 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rzgjt"] Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.139221 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rzgjt"] Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.144200 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10d51c83-0754-4e1a-a39f-de83ea48bf7b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.149190 4799 scope.go:117] "RemoveContainer" containerID="f9c6e2fc324951cbde93977c300e93553ede7063fd5ab935af4587be88b7a7d0" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.166883 4799 scope.go:117] "RemoveContainer" containerID="014f524f4df5adbc43364e6d3e0479200d0e879553db408ed36892c3f2122c58" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.172482 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f9tnv" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.184062 4799 scope.go:117] "RemoveContainer" containerID="76881bed8edd53b98da842868dca21297ec004b9841967785b4575f7b7395101" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.199248 4799 scope.go:117] "RemoveContainer" containerID="00959bca44834a71d21d39366577468682ec758788a23cacdfe816f20311f047" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.225529 4799 scope.go:117] "RemoveContainer" containerID="9650594c571ab74427ec889bc274b85b035bbe248fa6ef69e1a583ab1174cbe1" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.253105 4799 scope.go:117] "RemoveContainer" containerID="f1a1d70df6660288056043c348864079c14a39cad6849d02f3f5266aa9777071" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.303688 4799 scope.go:117] "RemoveContainer" containerID="528e0d5716515625678bcc77f0317cdf43d92ebaa6a81a32cca6eb9851c3059a" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.332290 4799 scope.go:117] "RemoveContainer" containerID="6645693638f38ec9faaca5b9de7a454b1d9736346297884af09de306a80ce68b" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.350871 4799 scope.go:117] "RemoveContainer" containerID="55b9815d61ead17298817e2f0805988d69be05ed02438114a47f717032235d6b" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.373095 4799 scope.go:117] "RemoveContainer" containerID="4a3adb7fdc20f816c07096284bfd9a60c036a62057d9e6d388541ecf1977fc60" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.373464 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfc335e9-4154-4713-a1b7-96f30bdab940-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cfc335e9-4154-4713-a1b7-96f30bdab940" (UID: "cfc335e9-4154-4713-a1b7-96f30bdab940"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.398941 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f9tnv"] Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.406322 4799 scope.go:117] "RemoveContainer" containerID="2e5e8f7cf8b6085ef90b348ffd10e54d061db67d039a27604dc1c49b5259b64b" Jan 21 17:37:19 crc kubenswrapper[4799]: W0121 17:37:19.425947 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5cb24916_faef_4a1c_8e2c_c51d108d915e.slice/crio-0191e6e9601c4b7f4927c8c07f80ba2eec9bf5bf6a00ae948dd84717fc37ee9a WatchSource:0}: Error finding container 0191e6e9601c4b7f4927c8c07f80ba2eec9bf5bf6a00ae948dd84717fc37ee9a: Status 404 returned error can't find the container with id 0191e6e9601c4b7f4927c8c07f80ba2eec9bf5bf6a00ae948dd84717fc37ee9a Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.431490 4799 scope.go:117] "RemoveContainer" containerID="935a8b5753828ee263122d0b2ff212e7a23701a1f73c48e8c000014ef3012d40" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.449255 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cfc335e9-4154-4713-a1b7-96f30bdab940-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.449485 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vccfc"] Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.454621 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vccfc"] Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.467945 4799 scope.go:117] "RemoveContainer" containerID="3697fb48318bf9a5c6fafe224205c7c78afb08ba0257bd77e58275cca87b17d3" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.488478 4799 scope.go:117] "RemoveContainer" containerID="320277bc27fd1de47ca7e3e01b4bd6c6972eace8b6347da3bb90efe4c4416f32" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.516955 4799 scope.go:117] "RemoveContainer" containerID="6cc1b442f43d9361767272990f59b2dfa84829cd516565494db84a46501a2067" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.549460 4799 scope.go:117] "RemoveContainer" containerID="071180c6b81bf24824080cd675475a0173dd6e745c6492f96804c44ca4d370fe" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.570713 4799 scope.go:117] "RemoveContainer" containerID="c4302fd9e6c5aafa1ffde4166178ec0c6ab6c5807c15986b62678ee18aa6f492" Jan 21 17:37:19 crc kubenswrapper[4799]: I0121 17:37:19.844999 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f9tnv" event={"ID":"5cb24916-faef-4a1c-8e2c-c51d108d915e","Type":"ContainerStarted","Data":"0191e6e9601c4b7f4927c8c07f80ba2eec9bf5bf6a00ae948dd84717fc37ee9a"} Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.222217 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06fabbfb-ca52-4980-9478-5fbe09bca884" path="/var/lib/kubelet/pods/06fabbfb-ca52-4980-9478-5fbe09bca884/volumes" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.223334 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10d51c83-0754-4e1a-a39f-de83ea48bf7b" path="/var/lib/kubelet/pods/10d51c83-0754-4e1a-a39f-de83ea48bf7b/volumes" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.223962 4799 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="2145d2a2-e101-44cb-b0c4-4161fbb910f8" path="/var/lib/kubelet/pods/2145d2a2-e101-44cb-b0c4-4161fbb910f8/volumes" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.225200 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a7d46ef-dfda-4602-a004-c26ff4335788" path="/var/lib/kubelet/pods/2a7d46ef-dfda-4602-a004-c26ff4335788/volumes" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.225799 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e7169e9-ed59-4259-bc63-a1079a9412c0" path="/var/lib/kubelet/pods/3e7169e9-ed59-4259-bc63-a1079a9412c0/volumes" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.226940 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a09e09d-8207-4727-9c4e-cea051cb063a" path="/var/lib/kubelet/pods/9a09e09d-8207-4727-9c4e-cea051cb063a/volumes" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.228319 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb30842a-4bc0-4d3d-aa45-ff611e019759" path="/var/lib/kubelet/pods/cb30842a-4bc0-4d3d-aa45-ff611e019759/volumes" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.229169 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfc335e9-4154-4713-a1b7-96f30bdab940" path="/var/lib/kubelet/pods/cfc335e9-4154-4713-a1b7-96f30bdab940/volumes" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.618814 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rf9sq"] Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.620410 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.622749 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.632636 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rf9sq"] Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.765923 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-catalog-content\") pod \"certified-operators-rf9sq\" (UID: \"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac\") " pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.765993 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-utilities\") pod \"certified-operators-rf9sq\" (UID: \"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac\") " pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.766023 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6244\" (UniqueName: \"kubernetes.io/projected/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-kube-api-access-m6244\") pod \"certified-operators-rf9sq\" (UID: \"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac\") " pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.855194 4799 generic.go:334] "Generic (PLEG): container finished" podID="5cb24916-faef-4a1c-8e2c-c51d108d915e" 
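The kubelet_volumes.go:163 entries above record the housekeeping pass that removes /var/lib/kubelet/pods/<uid>/volumes for pods that no longer exist on the node. A small sketch, assuming the raw journald lines as input, that tallies which pod UIDs were cleaned:

import re
from collections import Counter

CLEANED = re.compile(r'"Cleaned up orphaned pod volumes dir" podUID="([0-9a-f-]+)"')

def cleaned_pods(lines):
    # One Counter entry per pod UID whose leftover volumes dir was removed.
    return Counter(m.group(1) for line in lines for m in CLEANED.finditer(line))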
containerID="17db16ed8a6d85137baa7da5691a038173f16f42c7fb016d5658c51808cc32d6" exitCode=0 Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.855348 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f9tnv" event={"ID":"5cb24916-faef-4a1c-8e2c-c51d108d915e","Type":"ContainerDied","Data":"17db16ed8a6d85137baa7da5691a038173f16f42c7fb016d5658c51808cc32d6"} Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.866984 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-catalog-content\") pod \"certified-operators-rf9sq\" (UID: \"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac\") " pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.867091 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-utilities\") pod \"certified-operators-rf9sq\" (UID: \"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac\") " pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.867140 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6244\" (UniqueName: \"kubernetes.io/projected/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-kube-api-access-m6244\") pod \"certified-operators-rf9sq\" (UID: \"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac\") " pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.868356 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-catalog-content\") pod \"certified-operators-rf9sq\" (UID: \"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac\") " pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.868453 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-utilities\") pod \"certified-operators-rf9sq\" (UID: \"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac\") " pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.899604 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6244\" (UniqueName: \"kubernetes.io/projected/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-kube-api-access-m6244\") pod \"certified-operators-rf9sq\" (UID: \"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac\") " pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 17:37:20 crc kubenswrapper[4799]: I0121 17:37:20.945611 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 17:37:21 crc kubenswrapper[4799]: I0121 17:37:21.434412 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rf9sq"] Jan 21 17:37:21 crc kubenswrapper[4799]: I0121 17:37:21.627442 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-p6ls8"] Jan 21 17:37:21 crc kubenswrapper[4799]: I0121 17:37:21.630304 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p6ls8" Jan 21 17:37:21 crc kubenswrapper[4799]: I0121 17:37:21.636057 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p6ls8"] Jan 21 17:37:21 crc kubenswrapper[4799]: I0121 17:37:21.636210 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 21 17:37:21 crc kubenswrapper[4799]: I0121 17:37:21.830924 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57d3c4d8-2186-406a-bac8-d3b062232299-catalog-content\") pod \"redhat-operators-p6ls8\" (UID: \"57d3c4d8-2186-406a-bac8-d3b062232299\") " pod="openshift-marketplace/redhat-operators-p6ls8" Jan 21 17:37:21 crc kubenswrapper[4799]: I0121 17:37:21.831099 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdshb\" (UniqueName: \"kubernetes.io/projected/57d3c4d8-2186-406a-bac8-d3b062232299-kube-api-access-fdshb\") pod \"redhat-operators-p6ls8\" (UID: \"57d3c4d8-2186-406a-bac8-d3b062232299\") " pod="openshift-marketplace/redhat-operators-p6ls8" Jan 21 17:37:21 crc kubenswrapper[4799]: I0121 17:37:21.831222 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57d3c4d8-2186-406a-bac8-d3b062232299-utilities\") pod \"redhat-operators-p6ls8\" (UID: \"57d3c4d8-2186-406a-bac8-d3b062232299\") " pod="openshift-marketplace/redhat-operators-p6ls8" Jan 21 17:37:21 crc kubenswrapper[4799]: I0121 17:37:21.864164 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rf9sq" event={"ID":"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac","Type":"ContainerStarted","Data":"652d0b009b7dededdcdc1d9b634ada2fd6927e0fcb586fa3c49f3cc165aed858"} Jan 21 17:37:21 crc kubenswrapper[4799]: I0121 17:37:21.932152 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57d3c4d8-2186-406a-bac8-d3b062232299-catalog-content\") pod \"redhat-operators-p6ls8\" (UID: \"57d3c4d8-2186-406a-bac8-d3b062232299\") " pod="openshift-marketplace/redhat-operators-p6ls8" Jan 21 17:37:21 crc kubenswrapper[4799]: I0121 17:37:21.932212 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdshb\" (UniqueName: \"kubernetes.io/projected/57d3c4d8-2186-406a-bac8-d3b062232299-kube-api-access-fdshb\") pod \"redhat-operators-p6ls8\" (UID: \"57d3c4d8-2186-406a-bac8-d3b062232299\") " pod="openshift-marketplace/redhat-operators-p6ls8" Jan 21 17:37:21 crc kubenswrapper[4799]: I0121 17:37:21.932258 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57d3c4d8-2186-406a-bac8-d3b062232299-utilities\") pod \"redhat-operators-p6ls8\" (UID: \"57d3c4d8-2186-406a-bac8-d3b062232299\") " pod="openshift-marketplace/redhat-operators-p6ls8" Jan 21 17:37:21 crc kubenswrapper[4799]: I0121 17:37:21.932896 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57d3c4d8-2186-406a-bac8-d3b062232299-utilities\") pod \"redhat-operators-p6ls8\" (UID: \"57d3c4d8-2186-406a-bac8-d3b062232299\") " pod="openshift-marketplace/redhat-operators-p6ls8" Jan 21 17:37:21 crc 
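Each volume above moves through the same lifecycle, keyed by its UniqueName: operationExecutor.VerifyControllerAttachedVolume, then operationExecutor.MountVolume started, then MountVolume.SetUp succeeded. A sketch that flags volumes whose mount was started but never completed; it works on the raw lines, so the pattern matches the escaped \" quoting seen in the log:

import re

UNIQ = re.compile(r'UniqueName: \\"([^\\]+)\\"')  # matches the literal \"...\" in the message
STARTED = 'operationExecutor.MountVolume started'
SUCCEEDED = 'MountVolume.SetUp succeeded'

def stuck_mounts(lines):
    # UniqueNames with a "MountVolume started" entry but no later "SetUp succeeded".
    pending = set()
    for line in lines:
        m = UNIQ.search(line)
        if not m:
            continue
        if STARTED in line:
            pending.add(m.group(1))
        elif SUCCEEDED in line:
            pending.discard(m.group(1))
    return pending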
Jan 21 17:37:21 crc kubenswrapper[4799]: I0121 17:37:21.933368 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57d3c4d8-2186-406a-bac8-d3b062232299-catalog-content\") pod \"redhat-operators-p6ls8\" (UID: \"57d3c4d8-2186-406a-bac8-d3b062232299\") " pod="openshift-marketplace/redhat-operators-p6ls8"
Jan 21 17:37:21 crc kubenswrapper[4799]: I0121 17:37:21.951713 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdshb\" (UniqueName: \"kubernetes.io/projected/57d3c4d8-2186-406a-bac8-d3b062232299-kube-api-access-fdshb\") pod \"redhat-operators-p6ls8\" (UID: \"57d3c4d8-2186-406a-bac8-d3b062232299\") " pod="openshift-marketplace/redhat-operators-p6ls8"
Jan 21 17:37:21 crc kubenswrapper[4799]: I0121 17:37:21.960293 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p6ls8"
Jan 21 17:37:22 crc kubenswrapper[4799]: I0121 17:37:22.343725 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p6ls8"]
Jan 21 17:37:22 crc kubenswrapper[4799]: W0121 17:37:22.354243 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod57d3c4d8_2186_406a_bac8_d3b062232299.slice/crio-87b945d24bef6abc9d7340638ba3acef0ccbe073049528ab4f8a403a2ebcfd20 WatchSource:0}: Error finding container 87b945d24bef6abc9d7340638ba3acef0ccbe073049528ab4f8a403a2ebcfd20: Status 404 returned error can't find the container with id 87b945d24bef6abc9d7340638ba3acef0ccbe073049528ab4f8a403a2ebcfd20
Jan 21 17:37:22 crc kubenswrapper[4799]: I0121 17:37:22.871208 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p6ls8" event={"ID":"57d3c4d8-2186-406a-bac8-d3b062232299","Type":"ContainerStarted","Data":"87b945d24bef6abc9d7340638ba3acef0ccbe073049528ab4f8a403a2ebcfd20"}
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.023895 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fr4rq"]
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.025497 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fr4rq"
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.028093 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.038310 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fr4rq"]
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.149879 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hs896\" (UniqueName: \"kubernetes.io/projected/1da92736-ae07-4de0-b2a0-2f2fec07749a-kube-api-access-hs896\") pod \"community-operators-fr4rq\" (UID: \"1da92736-ae07-4de0-b2a0-2f2fec07749a\") " pod="openshift-marketplace/community-operators-fr4rq"
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.149977 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da92736-ae07-4de0-b2a0-2f2fec07749a-catalog-content\") pod \"community-operators-fr4rq\" (UID: \"1da92736-ae07-4de0-b2a0-2f2fec07749a\") " pod="openshift-marketplace/community-operators-fr4rq"
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.150002 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da92736-ae07-4de0-b2a0-2f2fec07749a-utilities\") pod \"community-operators-fr4rq\" (UID: \"1da92736-ae07-4de0-b2a0-2f2fec07749a\") " pod="openshift-marketplace/community-operators-fr4rq"
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.250861 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hs896\" (UniqueName: \"kubernetes.io/projected/1da92736-ae07-4de0-b2a0-2f2fec07749a-kube-api-access-hs896\") pod \"community-operators-fr4rq\" (UID: \"1da92736-ae07-4de0-b2a0-2f2fec07749a\") " pod="openshift-marketplace/community-operators-fr4rq"
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.251417 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da92736-ae07-4de0-b2a0-2f2fec07749a-catalog-content\") pod \"community-operators-fr4rq\" (UID: \"1da92736-ae07-4de0-b2a0-2f2fec07749a\") " pod="openshift-marketplace/community-operators-fr4rq"
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.252067 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da92736-ae07-4de0-b2a0-2f2fec07749a-catalog-content\") pod \"community-operators-fr4rq\" (UID: \"1da92736-ae07-4de0-b2a0-2f2fec07749a\") " pod="openshift-marketplace/community-operators-fr4rq"
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.252165 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da92736-ae07-4de0-b2a0-2f2fec07749a-utilities\") pod \"community-operators-fr4rq\" (UID: \"1da92736-ae07-4de0-b2a0-2f2fec07749a\") " pod="openshift-marketplace/community-operators-fr4rq"
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.252161 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da92736-ae07-4de0-b2a0-2f2fec07749a-utilities\") pod \"community-operators-fr4rq\" (UID: \"1da92736-ae07-4de0-b2a0-2f2fec07749a\") " pod="openshift-marketplace/community-operators-fr4rq"
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.271808 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hs896\" (UniqueName: \"kubernetes.io/projected/1da92736-ae07-4de0-b2a0-2f2fec07749a-kube-api-access-hs896\") pod \"community-operators-fr4rq\" (UID: \"1da92736-ae07-4de0-b2a0-2f2fec07749a\") " pod="openshift-marketplace/community-operators-fr4rq"
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.350304 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fr4rq"
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.584094 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fr4rq"]
Jan 21 17:37:23 crc kubenswrapper[4799]: W0121 17:37:23.607145 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1da92736_ae07_4de0_b2a0_2f2fec07749a.slice/crio-8e276fdb581140e90b2fda581bc747db7df654bec8434a51a8870490b11f9400 WatchSource:0}: Error finding container 8e276fdb581140e90b2fda581bc747db7df654bec8434a51a8870490b11f9400: Status 404 returned error can't find the container with id 8e276fdb581140e90b2fda581bc747db7df654bec8434a51a8870490b11f9400
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.882654 4799 generic.go:334] "Generic (PLEG): container finished" podID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" containerID="a286b028acb7f6693ff516df3b6636eb8f785888c026d36173802d5039fda2e9" exitCode=0
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.883027 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rf9sq" event={"ID":"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac","Type":"ContainerDied","Data":"a286b028acb7f6693ff516df3b6636eb8f785888c026d36173802d5039fda2e9"}
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.884939 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fr4rq" event={"ID":"1da92736-ae07-4de0-b2a0-2f2fec07749a","Type":"ContainerStarted","Data":"8e276fdb581140e90b2fda581bc747db7df654bec8434a51a8870490b11f9400"}
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.886923 4799 generic.go:334] "Generic (PLEG): container finished" podID="57d3c4d8-2186-406a-bac8-d3b062232299" containerID="258c61522adc1dbb218727bbaf752ea3508271df5a566aa31ff277d87dba34e1" exitCode=0
Jan 21 17:37:23 crc kubenswrapper[4799]: I0121 17:37:23.886967 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p6ls8" event={"ID":"57d3c4d8-2186-406a-bac8-d3b062232299","Type":"ContainerDied","Data":"258c61522adc1dbb218727bbaf752ea3508271df5a566aa31ff277d87dba34e1"}
Jan 21 17:37:24 crc kubenswrapper[4799]: I0121 17:37:24.900402 4799 generic.go:334] "Generic (PLEG): container finished" podID="1da92736-ae07-4de0-b2a0-2f2fec07749a" containerID="dfc0eeee473f43874339b7feaeca9a6e479b0d75950f3e7c5634c34d209765e7" exitCode=0
Jan 21 17:37:24 crc kubenswrapper[4799]: I0121 17:37:24.900889 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fr4rq" event={"ID":"1da92736-ae07-4de0-b2a0-2f2fec07749a","Type":"ContainerDied","Data":"dfc0eeee473f43874339b7feaeca9a6e479b0d75950f3e7c5634c34d209765e7"}
ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.296951 4799 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.297119 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.297457 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4" gracePeriod=15 Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.297591 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05" gracePeriod=15 Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.297471 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d" gracePeriod=15 Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.297539 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194" gracePeriod=15 Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.297529 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616" gracePeriod=15 Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.298445 4799 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 21 17:37:25 crc kubenswrapper[4799]: E0121 17:37:25.298623 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.298650 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 21 17:37:25 crc kubenswrapper[4799]: E0121 17:37:25.298664 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.298673 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 21 17:37:25 crc kubenswrapper[4799]: E0121 17:37:25.298685 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-insecure-readyz" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.298691 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 21 17:37:25 crc kubenswrapper[4799]: E0121 17:37:25.298700 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.298708 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 21 17:37:25 crc kubenswrapper[4799]: E0121 17:37:25.298717 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.298723 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 21 17:37:25 crc kubenswrapper[4799]: E0121 17:37:25.298733 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.298740 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 21 17:37:25 crc kubenswrapper[4799]: E0121 17:37:25.298752 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.298761 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.298878 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.298891 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.298904 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.298913 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.298921 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.298930 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.298939 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 21 17:37:25 crc kubenswrapper[4799]: E0121 17:37:25.299066 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-check-endpoints" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.299074 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.384738 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.384861 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.384924 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.384953 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.384998 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.385032 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.385080 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.385120 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 17:37:25 crc kubenswrapper[4799]: E0121 
Jan 21 17:37:25 crc kubenswrapper[4799]: E0121 17:37:25.385673 4799 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.177:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 21 17:37:25 crc kubenswrapper[4799]: E0121 17:37:25.477456 4799 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.177:6443: connect: connection refused" event="&Event{ObjectMeta:{community-operators-fr4rq.188ccfa15533855a openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:community-operators-fr4rq,UID:1da92736-ae07-4de0-b2a0-2f2fec07749a,APIVersion:v1,ResourceVersion:29606,FieldPath:spec.initContainers{extract-content},},Reason:Pulled,Message:Successfully pulled image \"registry.redhat.io/redhat/community-operator-index:v4.18\" in 573ms (573ms including waiting). Image size: 1202744046 bytes.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-21 17:37:25.475886426 +0000 UTC m=+272.102176449,LastTimestamp:2026-01-21 17:37:25.475886426 +0000 UTC m=+272.102176449,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.486622 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.486674 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.486721 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.486766 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.486760 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.486793 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.486839 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.486851 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.486898 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.486880 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.486913 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.486955 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.487031 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.487091 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.487177 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
\"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.487196 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.687550 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 17:37:25 crc kubenswrapper[4799]: W0121 17:37:25.714803 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-b6a5253cd4fa4dae20dca326bbbb3798ef6d7d226b9f45d1bd6161d45f06a497 WatchSource:0}: Error finding container b6a5253cd4fa4dae20dca326bbbb3798ef6d7d226b9f45d1bd6161d45f06a497: Status 404 returned error can't find the container with id b6a5253cd4fa4dae20dca326bbbb3798ef6d7d226b9f45d1bd6161d45f06a497 Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.909378 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fr4rq" event={"ID":"1da92736-ae07-4de0-b2a0-2f2fec07749a","Type":"ContainerStarted","Data":"f20c6afa391d7abe5478fd6539ac0a14384104107d11a6f7968542298200ee56"} Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.910350 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.910527 4799 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.911023 4799 generic.go:334] "Generic (PLEG): container finished" podID="57d3c4d8-2186-406a-bac8-d3b062232299" containerID="d21a4a592f5558ab560bc6e9d283c506a8cbe4c728059b734f00f0612a29e59c" exitCode=0 Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.911061 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p6ls8" event={"ID":"57d3c4d8-2186-406a-bac8-d3b062232299","Type":"ContainerDied","Data":"d21a4a592f5558ab560bc6e9d283c506a8cbe4c728059b734f00f0612a29e59c"} Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.911982 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.912256 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" 
pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.912468 4799 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.913915 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"b6a5253cd4fa4dae20dca326bbbb3798ef6d7d226b9f45d1bd6161d45f06a497"} Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.917784 4799 generic.go:334] "Generic (PLEG): container finished" podID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" containerID="ac578f8f1d1742acb482e78688867dd62e41148df40d044be5f06278e3420225" exitCode=0 Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.917906 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"3c270f61-528f-4ab0-a8a9-46efc3c85b3a","Type":"ContainerDied","Data":"ac578f8f1d1742acb482e78688867dd62e41148df40d044be5f06278e3420225"} Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.918742 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.919119 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.919323 4799 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.919838 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.921192 4799 generic.go:334] "Generic (PLEG): container finished" podID="5cb24916-faef-4a1c-8e2c-c51d108d915e" containerID="ea92a05b9f4e02fba45b1787f54a2edbb74d69bdbc89ec99aa0e346c2849dfaa" exitCode=0 Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.921260 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f9tnv" 
event={"ID":"5cb24916-faef-4a1c-8e2c-c51d108d915e","Type":"ContainerDied","Data":"ea92a05b9f4e02fba45b1787f54a2edbb74d69bdbc89ec99aa0e346c2849dfaa"} Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.921821 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.922174 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.922779 4799 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.923571 4799 generic.go:334] "Generic (PLEG): container finished" podID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" containerID="225709d76422d5e6eb054ef11d56dbb4703f27b61c27c82f2f7e48b880b81c47" exitCode=0 Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.923592 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.923641 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rf9sq" event={"ID":"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac","Type":"ContainerDied","Data":"225709d76422d5e6eb054ef11d56dbb4703f27b61c27c82f2f7e48b880b81c47"} Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.923875 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.924121 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.924466 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.924746 4799 status_manager.go:851] 
"Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.925105 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.925505 4799 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.925808 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.926601 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.929894 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.930791 4799 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4" exitCode=0 Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.930822 4799 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194" exitCode=0 Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.930843 4799 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05" exitCode=0 Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.930862 4799 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616" exitCode=2 Jan 21 17:37:25 crc kubenswrapper[4799]: I0121 17:37:25.930883 4799 scope.go:117] "RemoveContainer" containerID="0dd2f667ae8064222d78818077b35d1b5512f743cb08730c325786af4745d8d0" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.948064 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.953639 4799 generic.go:334] "Generic (PLEG): container finished" podID="1da92736-ae07-4de0-b2a0-2f2fec07749a" 
containerID="f20c6afa391d7abe5478fd6539ac0a14384104107d11a6f7968542298200ee56" exitCode=0 Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.953847 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fr4rq" event={"ID":"1da92736-ae07-4de0-b2a0-2f2fec07749a","Type":"ContainerDied","Data":"f20c6afa391d7abe5478fd6539ac0a14384104107d11a6f7968542298200ee56"} Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.954917 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.955268 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.957340 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.957916 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.958807 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p6ls8" event={"ID":"57d3c4d8-2186-406a-bac8-d3b062232299","Type":"ContainerStarted","Data":"3bd76754ced3d35429db3043388d937a86a6ac507c65a5e553162eaf2089855a"} Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.959918 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.960986 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"6715a04ffa90fa4177947f9fc9bd9bafec3975cd673149c80ad5350fbfd90db4"} Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.961329 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: E0121 17:37:26.961819 4799 kubelet.go:1929] "Failed creating a mirror pod for" err="Post 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.177:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.961856 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.962248 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.962534 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.962774 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.963176 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.963415 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.963647 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.963898 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.965021 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" 
pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.966439 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f9tnv" event={"ID":"5cb24916-faef-4a1c-8e2c-c51d108d915e","Type":"ContainerStarted","Data":"05b5978933f57a4702b3e2b5c771c95c6bd2be7cd9ec8810b09ce46596f3aa52"} Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.967275 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.967684 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.968590 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.968981 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.969514 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.970631 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rf9sq" event={"ID":"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac","Type":"ContainerStarted","Data":"4e144aeae3c67f438e600863035b360fc5e82ca00c7098b1101545a0d22a6f01"} Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.971805 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.972208 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.972638 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.973427 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:26 crc kubenswrapper[4799]: I0121 17:37:26.973734 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.271602 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.272658 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.273163 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.273683 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.274019 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.274259 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 
17:37:27.316348 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-var-lock\") pod \"3c270f61-528f-4ab0-a8a9-46efc3c85b3a\" (UID: \"3c270f61-528f-4ab0-a8a9-46efc3c85b3a\") " Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.316445 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-kubelet-dir\") pod \"3c270f61-528f-4ab0-a8a9-46efc3c85b3a\" (UID: \"3c270f61-528f-4ab0-a8a9-46efc3c85b3a\") " Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.316493 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-var-lock" (OuterVolumeSpecName: "var-lock") pod "3c270f61-528f-4ab0-a8a9-46efc3c85b3a" (UID: "3c270f61-528f-4ab0-a8a9-46efc3c85b3a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.316576 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "3c270f61-528f-4ab0-a8a9-46efc3c85b3a" (UID: "3c270f61-528f-4ab0-a8a9-46efc3c85b3a"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.316633 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-kube-api-access\") pod \"3c270f61-528f-4ab0-a8a9-46efc3c85b3a\" (UID: \"3c270f61-528f-4ab0-a8a9-46efc3c85b3a\") " Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.317094 4799 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.317117 4799 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.325341 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "3c270f61-528f-4ab0-a8a9-46efc3c85b3a" (UID: "3c270f61-528f-4ab0-a8a9-46efc3c85b3a"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.419144 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3c270f61-528f-4ab0-a8a9-46efc3c85b3a-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.773696 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.775543 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.776472 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.777091 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.777504 4799 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.777793 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.778063 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.778376 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.825877 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.825956 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.826012 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.826096 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.826220 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.826220 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.826529 4799 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.826554 4799 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.826569 4799 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.982943 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fr4rq" event={"ID":"1da92736-ae07-4de0-b2a0-2f2fec07749a","Type":"ContainerStarted","Data":"4c5de3d5fb0cb868ace99d9577a492e2022f057706198657d6db0f66d48b91d4"} Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.985472 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.985949 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.986175 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.986405 4799 
status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.986602 4799 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.986989 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.988057 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.988083 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"3c270f61-528f-4ab0-a8a9-46efc3c85b3a","Type":"ContainerDied","Data":"236cc45730c5084c4cfaf4b3a5009b3903448993180101146b8a729e388fcae8"} Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.988157 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="236cc45730c5084c4cfaf4b3a5009b3903448993180101146b8a729e388fcae8" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.990866 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.991822 4799 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d" exitCode=0 Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.993045 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 21 17:37:27 crc kubenswrapper[4799]: I0121 17:37:27.997370 4799 scope.go:117] "RemoveContainer" containerID="7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4" Jan 21 17:37:28 crc kubenswrapper[4799]: E0121 17:37:27.999812 4799 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.177:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.013065 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.014882 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.015266 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.015628 4799 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.016056 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.016541 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.016818 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.016977 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" 
pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.017147 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.017294 4799 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.017629 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.017828 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.018961 4799 scope.go:117] "RemoveContainer" containerID="ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.046845 4799 scope.go:117] "RemoveContainer" containerID="9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05" Jan 21 17:37:28 crc kubenswrapper[4799]: E0121 17:37:28.050638 4799 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.177:6443: connect: connection refused" event="&Event{ObjectMeta:{community-operators-fr4rq.188ccfa15533855a openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:community-operators-fr4rq,UID:1da92736-ae07-4de0-b2a0-2f2fec07749a,APIVersion:v1,ResourceVersion:29606,FieldPath:spec.initContainers{extract-content},},Reason:Pulled,Message:Successfully pulled image \"registry.redhat.io/redhat/community-operator-index:v4.18\" in 573ms (573ms including waiting). 
Image size: 1202744046 bytes.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-21 17:37:25.475886426 +0000 UTC m=+272.102176449,LastTimestamp:2026-01-21 17:37:25.475886426 +0000 UTC m=+272.102176449,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.067827 4799 scope.go:117] "RemoveContainer" containerID="abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.089357 4799 scope.go:117] "RemoveContainer" containerID="9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.107287 4799 scope.go:117] "RemoveContainer" containerID="3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.130484 4799 scope.go:117] "RemoveContainer" containerID="7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4" Jan 21 17:37:28 crc kubenswrapper[4799]: E0121 17:37:28.131074 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\": container with ID starting with 7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4 not found: ID does not exist" containerID="7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.131136 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4"} err="failed to get container status \"7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\": rpc error: code = NotFound desc = could not find container \"7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4\": container with ID starting with 7528610ed2aaed02136a3e59e18d7aedb317b46c6651920873e001e0afde21e4 not found: ID does not exist" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.131171 4799 scope.go:117] "RemoveContainer" containerID="ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194" Jan 21 17:37:28 crc kubenswrapper[4799]: E0121 17:37:28.131417 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\": container with ID starting with ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194 not found: ID does not exist" containerID="ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.131443 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194"} err="failed to get container status \"ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\": rpc error: code = NotFound desc = could not find container \"ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194\": container with ID starting with ee96b4c918944edae8bcaa92190bfa41b053866109267a647958bf32e1c11194 not found: ID does not exist" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.131460 4799 scope.go:117] "RemoveContainer" containerID="9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05" 
Jan 21 17:37:28 crc kubenswrapper[4799]: E0121 17:37:28.131686 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\": container with ID starting with 9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05 not found: ID does not exist" containerID="9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.131708 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05"} err="failed to get container status \"9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\": rpc error: code = NotFound desc = could not find container \"9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05\": container with ID starting with 9a197d054cc0da5e2e3aaea1f4a14bf6f821c3c6342e9410b2d6ecf1a5519b05 not found: ID does not exist" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.131729 4799 scope.go:117] "RemoveContainer" containerID="abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616" Jan 21 17:37:28 crc kubenswrapper[4799]: E0121 17:37:28.132044 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\": container with ID starting with abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616 not found: ID does not exist" containerID="abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.132114 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616"} err="failed to get container status \"abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\": rpc error: code = NotFound desc = could not find container \"abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616\": container with ID starting with abb10d9318ef0633cbb2df76a4676da10955ea8040f84ada72c5991caa22b616 not found: ID does not exist" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.132496 4799 scope.go:117] "RemoveContainer" containerID="9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d" Jan 21 17:37:28 crc kubenswrapper[4799]: E0121 17:37:28.132943 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\": container with ID starting with 9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d not found: ID does not exist" containerID="9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.132988 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d"} err="failed to get container status \"9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\": rpc error: code = NotFound desc = could not find container \"9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d\": container with ID starting with 9a156f87b4fb364c0344209b57ce3a67dab0f82dc4360bed395bb470a577184d not found: ID does not exist" Jan 21 17:37:28 crc 
kubenswrapper[4799]: I0121 17:37:28.133013 4799 scope.go:117] "RemoveContainer" containerID="3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056" Jan 21 17:37:28 crc kubenswrapper[4799]: E0121 17:37:28.133342 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\": container with ID starting with 3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056 not found: ID does not exist" containerID="3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.133379 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056"} err="failed to get container status \"3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\": rpc error: code = NotFound desc = could not find container \"3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056\": container with ID starting with 3b43feba6a265ad13cd4c76474b25a54f7e39dd37f6b014eadf2e5c051664056 not found: ID does not exist" Jan 21 17:37:28 crc kubenswrapper[4799]: I0121 17:37:28.212812 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 21 17:37:29 crc kubenswrapper[4799]: I0121 17:37:29.173077 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-f9tnv" Jan 21 17:37:29 crc kubenswrapper[4799]: I0121 17:37:29.176020 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-f9tnv" Jan 21 17:37:29 crc kubenswrapper[4799]: I0121 17:37:29.235812 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-f9tnv" Jan 21 17:37:29 crc kubenswrapper[4799]: I0121 17:37:29.236724 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:29 crc kubenswrapper[4799]: I0121 17:37:29.237431 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:29 crc kubenswrapper[4799]: I0121 17:37:29.237981 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:29 crc kubenswrapper[4799]: I0121 17:37:29.238249 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 
38.102.83.177:6443: connect: connection refused" Jan 21 17:37:29 crc kubenswrapper[4799]: I0121 17:37:29.238521 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:30 crc kubenswrapper[4799]: E0121 17:37:30.300573 4799 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.177:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" volumeName="registry-storage" Jan 21 17:37:30 crc kubenswrapper[4799]: I0121 17:37:30.946829 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 17:37:30 crc kubenswrapper[4799]: I0121 17:37:30.946906 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 17:37:30 crc kubenswrapper[4799]: I0121 17:37:30.997258 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 17:37:30 crc kubenswrapper[4799]: I0121 17:37:30.997961 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:30 crc kubenswrapper[4799]: I0121 17:37:30.999323 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:31 crc kubenswrapper[4799]: I0121 17:37:31.000323 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:31 crc kubenswrapper[4799]: I0121 17:37:31.001242 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:31 crc kubenswrapper[4799]: I0121 17:37:31.001569 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:31 crc 
kubenswrapper[4799]: I0121 17:37:31.072035 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-f9tnv" Jan 21 17:37:31 crc kubenswrapper[4799]: I0121 17:37:31.072732 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:31 crc kubenswrapper[4799]: I0121 17:37:31.072929 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:31 crc kubenswrapper[4799]: I0121 17:37:31.073288 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:31 crc kubenswrapper[4799]: I0121 17:37:31.073726 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:31 crc kubenswrapper[4799]: I0121 17:37:31.073963 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:31 crc kubenswrapper[4799]: I0121 17:37:31.080064 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 17:37:31 crc kubenswrapper[4799]: I0121 17:37:31.080403 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:31 crc kubenswrapper[4799]: I0121 17:37:31.080635 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:31 crc kubenswrapper[4799]: I0121 17:37:31.080950 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:31 crc kubenswrapper[4799]: I0121 
17:37:31.081351 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:31 crc kubenswrapper[4799]: I0121 17:37:31.081627 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:31 crc kubenswrapper[4799]: I0121 17:37:31.961249 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-p6ls8" Jan 21 17:37:31 crc kubenswrapper[4799]: I0121 17:37:31.961649 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-p6ls8" Jan 21 17:37:32 crc kubenswrapper[4799]: E0121 17:37:32.944147 4799 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:32 crc kubenswrapper[4799]: E0121 17:37:32.945250 4799 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:32 crc kubenswrapper[4799]: E0121 17:37:32.945897 4799 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:32 crc kubenswrapper[4799]: E0121 17:37:32.946307 4799 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:32 crc kubenswrapper[4799]: E0121 17:37:32.946620 4799 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" Jan 21 17:37:32 crc kubenswrapper[4799]: I0121 17:37:32.946678 4799 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 21 17:37:32 crc kubenswrapper[4799]: E0121 17:37:32.946972 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" interval="200ms" Jan 21 17:37:33 crc kubenswrapper[4799]: I0121 17:37:33.004680 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-p6ls8" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" containerName="registry-server" probeResult="failure" output=< Jan 21 17:37:33 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Jan 21 17:37:33 crc kubenswrapper[4799]: > Jan 21 17:37:33 crc 
kubenswrapper[4799]: E0121 17:37:33.148033 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" interval="400ms"
Jan 21 17:37:33 crc kubenswrapper[4799]: I0121 17:37:33.351483 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fr4rq"
Jan 21 17:37:33 crc kubenswrapper[4799]: I0121 17:37:33.351543 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fr4rq"
Jan 21 17:37:33 crc kubenswrapper[4799]: I0121 17:37:33.395403 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fr4rq"
Jan 21 17:37:33 crc kubenswrapper[4799]: I0121 17:37:33.396194 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:33 crc kubenswrapper[4799]: I0121 17:37:33.396639 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:33 crc kubenswrapper[4799]: I0121 17:37:33.397264 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:33 crc kubenswrapper[4799]: I0121 17:37:33.397579 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:33 crc kubenswrapper[4799]: I0121 17:37:33.397908 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:33 crc kubenswrapper[4799]: E0121 17:37:33.549819 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" interval="800ms"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.087792 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fr4rq"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.088780 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.089307 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.089842 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.090259 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.090579 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.106192 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" podUID="d06abe7d-735c-46b1-b98a-f7ef020fe863" containerName="oauth-openshift" containerID="cri-o://1532c98d202b55ede78435175a95344e0f17e67da30f259ded513b15a7a6a143" gracePeriod=15
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.209204 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.211025 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.211319 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.211819 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.212319 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:34 crc kubenswrapper[4799]: E0121 17:37:34.351365 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" interval="1.6s"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.536302 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.536887 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.537072 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.537281 4799 status_manager.go:851] "Failed to get status for pod" podUID="d06abe7d-735c-46b1-b98a-f7ef020fe863" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-96sxw\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.537780 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.539362 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.540540 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.570162 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-cliconfig\") pod \"d06abe7d-735c-46b1-b98a-f7ef020fe863\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") "
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.570236 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-serving-cert\") pod \"d06abe7d-735c-46b1-b98a-f7ef020fe863\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") "
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.570281 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-idp-0-file-data\") pod \"d06abe7d-735c-46b1-b98a-f7ef020fe863\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") "
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.570308 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d06abe7d-735c-46b1-b98a-f7ef020fe863-audit-dir\") pod \"d06abe7d-735c-46b1-b98a-f7ef020fe863\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") "
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.570347 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-session\") pod \"d06abe7d-735c-46b1-b98a-f7ef020fe863\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") "
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.570856 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d06abe7d-735c-46b1-b98a-f7ef020fe863-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "d06abe7d-735c-46b1-b98a-f7ef020fe863" (UID: "d06abe7d-735c-46b1-b98a-f7ef020fe863"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.571940 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "d06abe7d-735c-46b1-b98a-f7ef020fe863" (UID: "d06abe7d-735c-46b1-b98a-f7ef020fe863"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.577535 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "d06abe7d-735c-46b1-b98a-f7ef020fe863" (UID: "d06abe7d-735c-46b1-b98a-f7ef020fe863"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.578053 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "d06abe7d-735c-46b1-b98a-f7ef020fe863" (UID: "d06abe7d-735c-46b1-b98a-f7ef020fe863"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.578224 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "d06abe7d-735c-46b1-b98a-f7ef020fe863" (UID: "d06abe7d-735c-46b1-b98a-f7ef020fe863"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.670964 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-trusted-ca-bundle\") pod \"d06abe7d-735c-46b1-b98a-f7ef020fe863\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") "
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.671026 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-audit-policies\") pod \"d06abe7d-735c-46b1-b98a-f7ef020fe863\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") "
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.671367 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-service-ca\") pod \"d06abe7d-735c-46b1-b98a-f7ef020fe863\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") "
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.671824 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-login\") pod \"d06abe7d-735c-46b1-b98a-f7ef020fe863\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") "
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.671738 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "d06abe7d-735c-46b1-b98a-f7ef020fe863" (UID: "d06abe7d-735c-46b1-b98a-f7ef020fe863"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.671752 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "d06abe7d-735c-46b1-b98a-f7ef020fe863" (UID: "d06abe7d-735c-46b1-b98a-f7ef020fe863"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.671819 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "d06abe7d-735c-46b1-b98a-f7ef020fe863" (UID: "d06abe7d-735c-46b1-b98a-f7ef020fe863"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.671899 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-ocp-branding-template\") pod \"d06abe7d-735c-46b1-b98a-f7ef020fe863\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") "
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.671929 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-router-certs\") pod \"d06abe7d-735c-46b1-b98a-f7ef020fe863\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") "
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.671985 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-error\") pod \"d06abe7d-735c-46b1-b98a-f7ef020fe863\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") "
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.672027 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-provider-selection\") pod \"d06abe7d-735c-46b1-b98a-f7ef020fe863\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") "
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.672077 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mghkv\" (UniqueName: \"kubernetes.io/projected/d06abe7d-735c-46b1-b98a-f7ef020fe863-kube-api-access-mghkv\") pod \"d06abe7d-735c-46b1-b98a-f7ef020fe863\" (UID: \"d06abe7d-735c-46b1-b98a-f7ef020fe863\") "
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.672337 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.672349 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.672357 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.672367 4799 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d06abe7d-735c-46b1-b98a-f7ef020fe863-audit-dir\") on node \"crc\" DevicePath \"\""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.672382 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.672394 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.672404 4799 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-audit-policies\") on node \"crc\" DevicePath \"\""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.672412 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.675609 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "d06abe7d-735c-46b1-b98a-f7ef020fe863" (UID: "d06abe7d-735c-46b1-b98a-f7ef020fe863"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.676707 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d06abe7d-735c-46b1-b98a-f7ef020fe863-kube-api-access-mghkv" (OuterVolumeSpecName: "kube-api-access-mghkv") pod "d06abe7d-735c-46b1-b98a-f7ef020fe863" (UID: "d06abe7d-735c-46b1-b98a-f7ef020fe863"). InnerVolumeSpecName "kube-api-access-mghkv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.676737 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "d06abe7d-735c-46b1-b98a-f7ef020fe863" (UID: "d06abe7d-735c-46b1-b98a-f7ef020fe863"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.678180 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "d06abe7d-735c-46b1-b98a-f7ef020fe863" (UID: "d06abe7d-735c-46b1-b98a-f7ef020fe863"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.678804 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "d06abe7d-735c-46b1-b98a-f7ef020fe863" (UID: "d06abe7d-735c-46b1-b98a-f7ef020fe863"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.678866 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "d06abe7d-735c-46b1-b98a-f7ef020fe863" (UID: "d06abe7d-735c-46b1-b98a-f7ef020fe863"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.773736 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.773775 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.773785 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.773800 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.773812 4799 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d06abe7d-735c-46b1-b98a-f7ef020fe863-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Jan 21 17:37:34 crc kubenswrapper[4799]: I0121 17:37:34.773826 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mghkv\" (UniqueName: \"kubernetes.io/projected/d06abe7d-735c-46b1-b98a-f7ef020fe863-kube-api-access-mghkv\") on node \"crc\" DevicePath \"\""
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.061788 4799 generic.go:334] "Generic (PLEG): container finished" podID="d06abe7d-735c-46b1-b98a-f7ef020fe863" containerID="1532c98d202b55ede78435175a95344e0f17e67da30f259ded513b15a7a6a143" exitCode=0
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.062259 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw"
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.062285 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" event={"ID":"d06abe7d-735c-46b1-b98a-f7ef020fe863","Type":"ContainerDied","Data":"1532c98d202b55ede78435175a95344e0f17e67da30f259ded513b15a7a6a143"}
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.062393 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" event={"ID":"d06abe7d-735c-46b1-b98a-f7ef020fe863","Type":"ContainerDied","Data":"aad5b442667862585fc8652898487927970ec0109ab1ef2aa2e8bcb6aa8700a3"}
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.062421 4799 scope.go:117] "RemoveContainer" containerID="1532c98d202b55ede78435175a95344e0f17e67da30f259ded513b15a7a6a143"
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.063280 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.063525 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.064540 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.064865 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.065234 4799 status_manager.go:851] "Failed to get status for pod" podUID="d06abe7d-735c-46b1-b98a-f7ef020fe863" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-96sxw\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.065785 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.081150 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.081867 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.082900 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.083379 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.083669 4799 status_manager.go:851] "Failed to get status for pod" podUID="d06abe7d-735c-46b1-b98a-f7ef020fe863" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-96sxw\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.083976 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.090072 4799 scope.go:117] "RemoveContainer" containerID="1532c98d202b55ede78435175a95344e0f17e67da30f259ded513b15a7a6a143"
Jan 21 17:37:35 crc kubenswrapper[4799]: E0121 17:37:35.091054 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1532c98d202b55ede78435175a95344e0f17e67da30f259ded513b15a7a6a143\": container with ID starting with 1532c98d202b55ede78435175a95344e0f17e67da30f259ded513b15a7a6a143 not found: ID does not exist" containerID="1532c98d202b55ede78435175a95344e0f17e67da30f259ded513b15a7a6a143"
Jan 21 17:37:35 crc kubenswrapper[4799]: I0121 17:37:35.091203 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1532c98d202b55ede78435175a95344e0f17e67da30f259ded513b15a7a6a143"} err="failed to get container status \"1532c98d202b55ede78435175a95344e0f17e67da30f259ded513b15a7a6a143\": rpc error: code = NotFound desc = could not find container \"1532c98d202b55ede78435175a95344e0f17e67da30f259ded513b15a7a6a143\": container with ID starting with 1532c98d202b55ede78435175a95344e0f17e67da30f259ded513b15a7a6a143 not found: ID does not exist"
Jan 21 17:37:35 crc kubenswrapper[4799]: E0121 17:37:35.953363 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" interval="3.2s"
Jan 21 17:37:38 crc kubenswrapper[4799]: E0121 17:37:38.055068 4799 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.177:6443: connect: connection refused" event="&Event{ObjectMeta:{community-operators-fr4rq.188ccfa15533855a openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:community-operators-fr4rq,UID:1da92736-ae07-4de0-b2a0-2f2fec07749a,APIVersion:v1,ResourceVersion:29606,FieldPath:spec.initContainers{extract-content},},Reason:Pulled,Message:Successfully pulled image \"registry.redhat.io/redhat/community-operator-index:v4.18\" in 573ms (573ms including waiting). Image size: 1202744046 bytes.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-21 17:37:25.475886426 +0000 UTC m=+272.102176449,LastTimestamp:2026-01-21 17:37:25.475886426 +0000 UTC m=+272.102176449,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Jan 21 17:37:38 crc kubenswrapper[4799]: I0121 17:37:38.489730 4799 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Readiness probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Jan 21 17:37:38 crc kubenswrapper[4799]: I0121 17:37:38.489912 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Jan 21 17:37:39 crc kubenswrapper[4799]: E0121 17:37:39.154987 4799 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" interval="6.4s"
Jan 21 17:37:39 crc kubenswrapper[4799]: I0121 17:37:39.204758 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:39 crc kubenswrapper[4799]: I0121 17:37:39.207391 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:39 crc kubenswrapper[4799]: I0121 17:37:39.210961 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:39 crc kubenswrapper[4799]: I0121 17:37:39.211651 4799 status_manager.go:851] "Failed to get status for pod" podUID="d06abe7d-735c-46b1-b98a-f7ef020fe863" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-96sxw\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:39 crc kubenswrapper[4799]: I0121 17:37:39.211852 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:39 crc kubenswrapper[4799]: I0121 17:37:39.212034 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:39 crc kubenswrapper[4799]: I0121 17:37:39.212206 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:39 crc kubenswrapper[4799]: I0121 17:37:39.222238 4799 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="482472d7-f466-4155-8743-5469a2d218cc"
Jan 21 17:37:39 crc kubenswrapper[4799]: I0121 17:37:39.222311 4799 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="482472d7-f466-4155-8743-5469a2d218cc"
Jan 21 17:37:39 crc kubenswrapper[4799]: E0121 17:37:39.223093 4799 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:39 crc kubenswrapper[4799]: I0121 17:37:39.224067 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.092194 4799 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="cb5ed36eb4ca53273b6d4944761d9f4969402b88de82c305a3b746305cc01a0e" exitCode=0
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.092302 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"cb5ed36eb4ca53273b6d4944761d9f4969402b88de82c305a3b746305cc01a0e"}
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.092653 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"65dd152e7ef21022370a5a3c7b2cb91ce14dd2acc39785618f8de957cfc7a3bb"}
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.093008 4799 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="482472d7-f466-4155-8743-5469a2d218cc"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.093027 4799 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="482472d7-f466-4155-8743-5469a2d218cc"
Jan 21 17:37:40 crc kubenswrapper[4799]: E0121 17:37:40.093697 4799 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.177:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.093714 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.094139 4799 status_manager.go:851] "Failed to get status for pod" podUID="d06abe7d-735c-46b1-b98a-f7ef020fe863" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-96sxw\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.094699 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.095021 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.095342 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.095588 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.097426 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.097480 4799 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194" exitCode=1
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.097515 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194"}
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.097977 4799 scope.go:117] "RemoveContainer" containerID="d0c0d99cd63e1a0ac20cafa6c32681f4640e06697b6f6bc447d91f458814c194"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.098336 4799 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.098670 4799 status_manager.go:851] "Failed to get status for pod" podUID="5cb24916-faef-4a1c-8e2c-c51d108d915e" pod="openshift-marketplace/redhat-marketplace-f9tnv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-f9tnv\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.098908 4799 status_manager.go:851] "Failed to get status for pod" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.099243 4799 status_manager.go:851] "Failed to get status for pod" podUID="d06abe7d-735c-46b1-b98a-f7ef020fe863" pod="openshift-authentication/oauth-openshift-558db77b4-96sxw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-96sxw\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.099492 4799 status_manager.go:851] "Failed to get status for pod" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" pod="openshift-marketplace/certified-operators-rf9sq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-rf9sq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.099704 4799 status_manager.go:851] "Failed to get status for pod" podUID="57d3c4d8-2186-406a-bac8-d3b062232299" pod="openshift-marketplace/redhat-operators-p6ls8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-p6ls8\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:40 crc kubenswrapper[4799]: I0121 17:37:40.099914 4799 status_manager.go:851] "Failed to get status for pod" podUID="1da92736-ae07-4de0-b2a0-2f2fec07749a" pod="openshift-marketplace/community-operators-fr4rq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fr4rq\": dial tcp 38.102.83.177:6443: connect: connection refused"
Jan 21 17:37:41 crc kubenswrapper[4799]: I0121 17:37:41.107078 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f735f06e8821f6531b11e4f8e47c31f5570561c68c3943b5bfe082f7cbcdd9a0"}
Jan 21 17:37:41 crc kubenswrapper[4799]: I0121 17:37:41.107156 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"4327170f9a714405d9e723a091db6a6ace6d9035228aae1c3727e26b06dabd24"}
Jan 21 17:37:41 crc kubenswrapper[4799]: I0121 17:37:41.107173 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"94d39d98797678ba9864636982127a450f995a94ea821a30f86edd63d65b5cf3"}
Jan 21 17:37:41 crc kubenswrapper[4799]: I0121 17:37:41.107186 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2e0bf65d31dac59ffd987c902a56879b8a108efdd097384e1d95643d15ce7985"}
Jan 21 17:37:41 crc kubenswrapper[4799]: I0121 17:37:41.117019 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Jan 21 17:37:41 crc kubenswrapper[4799]: I0121 17:37:41.117085 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5ce4734e8ca46ad0266e6e8fa3bf95aa76733120f50bc2af01c5af9c728023c7"}
Jan 21 17:37:41 crc kubenswrapper[4799]: I0121 17:37:41.769042 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 21 17:37:41 crc kubenswrapper[4799]: I0121 17:37:41.772707 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 21 17:37:42 crc kubenswrapper[4799]: I0121 17:37:42.017799 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-p6ls8"
Jan 21 17:37:42 crc kubenswrapper[4799]: I0121 17:37:42.063029 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-p6ls8"
Jan 21 17:37:42 crc kubenswrapper[4799]: I0121 17:37:42.127556 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"3179bd1fedd6877bd6a003ff23346d70b2cb93d57089ee6da2895abf8f5280c3"}
Jan 21 17:37:42 crc kubenswrapper[4799]: I0121 17:37:42.127961 4799 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="482472d7-f466-4155-8743-5469a2d218cc"
Jan 21 17:37:42 crc kubenswrapper[4799]: I0121 17:37:42.127989 4799 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="482472d7-f466-4155-8743-5469a2d218cc"
Jan 21 17:37:42 crc kubenswrapper[4799]: I0121 17:37:42.128098 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 21 17:37:44 crc kubenswrapper[4799]: I0121 17:37:44.224431 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:44 crc kubenswrapper[4799]: I0121 17:37:44.224853 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:44 crc kubenswrapper[4799]: I0121 17:37:44.232117 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:47 crc kubenswrapper[4799]: I0121 17:37:47.140307 4799 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:47 crc kubenswrapper[4799]: I0121 17:37:47.166463 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:47 crc kubenswrapper[4799]: I0121 17:37:47.166517 4799 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="482472d7-f466-4155-8743-5469a2d218cc"
Jan 21 17:37:47 crc kubenswrapper[4799]: I0121 17:37:47.166551 4799 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="482472d7-f466-4155-8743-5469a2d218cc"
Jan 21 17:37:47 crc kubenswrapper[4799]: I0121 17:37:47.172295 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:47 crc kubenswrapper[4799]: I0121 17:37:47.204696 4799 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="7cc948be-a978-4309-9507-ebcfae64f583"
Jan 21 17:37:48 crc kubenswrapper[4799]: I0121 17:37:48.174263 4799 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="482472d7-f466-4155-8743-5469a2d218cc"
Jan 21 17:37:48 crc kubenswrapper[4799]: I0121 17:37:48.174305 4799 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="482472d7-f466-4155-8743-5469a2d218cc"
Jan 21 17:37:48 crc kubenswrapper[4799]: I0121 17:37:48.177824 4799 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="7cc948be-a978-4309-9507-ebcfae64f583"
Jan 21 17:37:49 crc kubenswrapper[4799]: I0121 17:37:49.182650 4799 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="482472d7-f466-4155-8743-5469a2d218cc"
Jan 21 17:37:49 crc kubenswrapper[4799]: I0121 17:37:49.183033 4799 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="482472d7-f466-4155-8743-5469a2d218cc"
Jan 21 17:37:49 crc kubenswrapper[4799]: I0121 17:37:49.191665 4799 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="7cc948be-a978-4309-9507-ebcfae64f583"
Jan 21 17:37:53 crc kubenswrapper[4799]: I0121 17:37:53.700606 4799 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials
Jan 21 17:37:57 crc kubenswrapper[4799]: I0121 17:37:57.462489 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Jan 21 17:37:57 crc kubenswrapper[4799]: I0121 17:37:57.508809 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Jan 21 17:37:57 crc kubenswrapper[4799]: I0121 17:37:57.563854 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Jan 21 17:37:57 crc kubenswrapper[4799]: I0121 17:37:57.612948 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Jan 21 17:37:57 crc kubenswrapper[4799]: I0121 17:37:57.756662 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Jan 21 17:37:57 crc kubenswrapper[4799]: I0121 17:37:57.965019 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Jan 21 17:37:58 crc kubenswrapper[4799]: I0121 17:37:58.167099 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Jan 21 17:37:58 crc kubenswrapper[4799]: I0121 17:37:58.376933 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Jan 21 17:37:58 crc kubenswrapper[4799]: I0121 17:37:58.493678 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 21 17:37:58 crc kubenswrapper[4799]: I0121 17:37:58.721856 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Jan 21 17:37:58 crc kubenswrapper[4799]: I0121 17:37:58.759680 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Jan 21 17:37:58 crc kubenswrapper[4799]: I0121 17:37:58.805294 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Jan 21 17:37:58 crc kubenswrapper[4799]: I0121 17:37:58.868023 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.075594 4799 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.077524 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fr4rq" podStartSLOduration=33.631364695 podStartE2EDuration="36.077498254s" podCreationTimestamp="2026-01-21 17:37:23 +0000 UTC" firstStartedPulling="2026-01-21 17:37:24.902277583 +0000 UTC m=+271.528567606" lastFinishedPulling="2026-01-21 17:37:27.348411142 +0000 UTC m=+273.974701165" observedRunningTime="2026-01-21 17:37:46.956215731 +0000 UTC m=+293.582505784" watchObservedRunningTime="2026-01-21 17:37:59.077498254 +0000 UTC m=+305.703788277"
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.078643 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-f9tnv" podStartSLOduration=35.613215005 podStartE2EDuration="41.078637588s" podCreationTimestamp="2026-01-21 17:37:18 +0000 UTC" firstStartedPulling="2026-01-21 17:37:20.857487576 +0000 UTC m=+267.483777599" lastFinishedPulling="2026-01-21 17:37:26.322910159 +0000 UTC m=+272.949200182" observedRunningTime="2026-01-21 17:37:47.048385468 +0000 UTC m=+293.674675511" watchObservedRunningTime="2026-01-21 17:37:59.078637588 +0000 UTC m=+305.704927611"
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.078997 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rf9sq" podStartSLOduration=36.641860905 podStartE2EDuration="39.078991489s" podCreationTimestamp="2026-01-21 17:37:20 +0000 UTC" firstStartedPulling="2026-01-21 17:37:23.887266659 +0000 UTC m=+270.513556682" lastFinishedPulling="2026-01-21 17:37:26.324397243 +0000 UTC m=+272.950687266" observedRunningTime="2026-01-21 17:37:47.129405355 +0000 UTC m=+293.755695388" watchObservedRunningTime="2026-01-21 17:37:59.078991489 +0000 UTC m=+305.705281502"
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.079503 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-p6ls8" podStartSLOduration=35.62922196 podStartE2EDuration="38.079498444s" podCreationTimestamp="2026-01-21 17:37:21 +0000 UTC" firstStartedPulling="2026-01-21 17:37:23.889482064 +0000 UTC m=+270.515772087" lastFinishedPulling="2026-01-21 17:37:26.339758548 +0000 UTC m=+272.966048571" observedRunningTime="2026-01-21 17:37:46.933417066 +0000 UTC m=+293.559707099" watchObservedRunningTime="2026-01-21 17:37:59.079498444 +0000 UTC m=+305.705788467"
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.080063 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-96sxw","openshift-kube-apiserver/kube-apiserver-crc"]
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.080155 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.086555 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.100401 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=12.100380661 podStartE2EDuration="12.100380661s" podCreationTimestamp="2026-01-21 17:37:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:37:59.099770243 +0000 UTC m=+305.726060286" watchObservedRunningTime="2026-01-21 17:37:59.100380661 +0000 UTC m=+305.726670684"
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.202640 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.336121 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.614330 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.676303 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.699473 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.733894 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.770005 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Jan 21 17:37:59 crc kubenswrapper[4799]: I0121 17:37:59.895214 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Jan 21 17:38:00 crc kubenswrapper[4799]: I0121 17:38:00.016605 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Jan 21 17:38:00 crc kubenswrapper[4799]: I0121 17:38:00.037708 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Jan 21 17:38:00 crc kubenswrapper[4799]: I0121 17:38:00.114348 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Jan 21 17:38:00 crc kubenswrapper[4799]: I0121 17:38:00.150309 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Jan 21 17:38:00 crc kubenswrapper[4799]: I0121 17:38:00.211015 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d06abe7d-735c-46b1-b98a-f7ef020fe863" path="/var/lib/kubelet/pods/d06abe7d-735c-46b1-b98a-f7ef020fe863/volumes"
Jan 21 17:38:00 crc kubenswrapper[4799]: I0121 17:38:00.254282 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Jan 21 17:38:00 crc kubenswrapper[4799]: I0121 17:38:00.308311 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Jan 21 17:38:00 crc kubenswrapper[4799]: I0121 17:38:00.477622 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 21 17:38:00 crc kubenswrapper[4799]: I0121 17:38:00.679087 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Jan 21 17:38:00 crc kubenswrapper[4799]: I0121 17:38:00.716494 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Jan 21 17:38:00 crc kubenswrapper[4799]: I0121 17:38:00.736347 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Jan 21 17:38:00 crc kubenswrapper[4799]: I0121 17:38:00.752676 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Jan 21 17:38:00 crc kubenswrapper[4799]: I0121 17:38:00.839504 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Jan 21 17:38:01 crc kubenswrapper[4799]: I0121 17:38:01.003157 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Jan 21 17:38:01 crc kubenswrapper[4799]: I0121 17:38:01.006775 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Jan 21 17:38:01 crc kubenswrapper[4799]: I0121 17:38:01.008653 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Jan 21 17:38:01 crc kubenswrapper[4799]: I0121 17:38:01.023797 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Jan 21 17:38:01 crc kubenswrapper[4799]: I0121 17:38:01.041480 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Jan 21 17:38:01 crc kubenswrapper[4799]: I0121 17:38:01.122033 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Jan 21 17:38:01 crc kubenswrapper[4799]: I0121 17:38:01.133354 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Jan 21 17:38:01 crc kubenswrapper[4799]: I0121 17:38:01.174466 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Jan 21 17:38:01 crc kubenswrapper[4799]: I0121 17:38:01.297521 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Jan 21 17:38:01 crc kubenswrapper[4799]: I0121 17:38:01.389090 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 21 17:38:01 crc kubenswrapper[4799]: I0121 17:38:01.489951 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Jan 21 17:38:01 crc kubenswrapper[4799]: I0121 17:38:01.508891 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Jan 21 17:38:01 crc kubenswrapper[4799]: I0121 17:38:01.898689 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.047712 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.200907 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.241382 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.244462 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.249220 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.249977 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.414432 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.462931 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.491764 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.601467 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.603832 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.617821 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.624618 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.661051 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.687545 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.721596 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.739344 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.826892 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.826960 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.845325 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.943265 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Jan 21 17:38:02 crc kubenswrapper[4799]: I0121 17:38:02.995299 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.040471 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Jan 21 17:38:03 crc kubenswrapper[4799]: I0121
17:38:03.048626 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.084964 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.160014 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.186785 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.242254 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.250421 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.288221 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.296603 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.433679 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.526361 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.742152 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.849083 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.856277 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.892850 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.910380 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.910597 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.911935 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.966957 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.978330 4799 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Jan 21 17:38:03 crc kubenswrapper[4799]: I0121 17:38:03.981369 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.021650 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.071978 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.072154 4799 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.180753 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.201954 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.214973 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.312585 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.355532 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.364951 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.435650 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.441528 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.565322 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.587534 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.658918 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.680339 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.780287 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.787627 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.845414 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.876182 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.890357 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.923493 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.983209 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.993563 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Jan 21 17:38:04 crc kubenswrapper[4799]: I0121 17:38:04.995531 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.065227 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.141210 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.280835 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.392671 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.396998 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.433967 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.441146 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.459085 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.482686 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.512405 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.545180 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.593301 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.629252 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.652852 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.665080 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.710031 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.770960 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.778251 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.810806 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.835508 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.871601 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.876313 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.946968 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.963471 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Jan 21 17:38:05 crc kubenswrapper[4799]: I0121 17:38:05.964379 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.025216 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.048977 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.086708 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.225348 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.226710 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.379705 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.479464 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.550445 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.660301 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.662757 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.665935 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.683786 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.689890 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.752414 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.793263 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.794821 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.817914 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Jan 21 17:38:06 crc kubenswrapper[4799]: I0121 17:38:06.916360 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Jan 21 17:38:07 crc kubenswrapper[4799]: I0121 17:38:07.065454 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Jan 21 17:38:07 crc kubenswrapper[4799]: I0121 17:38:07.119752 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Jan 21 17:38:07 crc kubenswrapper[4799]: I0121 17:38:07.386473 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Jan 21 17:38:07 crc kubenswrapper[4799]: I0121 17:38:07.443933 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Jan 21 17:38:07 crc kubenswrapper[4799]: I0121 17:38:07.618025 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Jan 21 17:38:07 crc kubenswrapper[4799]: I0121 17:38:07.795863 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 21 17:38:07 crc kubenswrapper[4799]: I0121 17:38:07.841907 4799 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.020028 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.053011 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.136387 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.138224 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.227535 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.405253 4799 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.408885 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.410175 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.410331 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.424873 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.425336 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.433933 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.507167 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.517965 4799 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.548222 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.594330 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.648217 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.710088 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.827393 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.843040 4799 kubelet.go:2421] "SyncLoop ADD"
source="api" pods=["openshift-authentication/oauth-openshift-6fffd54687-w8xsj"] Jan 21 17:38:08 crc kubenswrapper[4799]: E0121 17:38:08.843413 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" containerName="installer" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.843447 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" containerName="installer" Jan 21 17:38:08 crc kubenswrapper[4799]: E0121 17:38:08.843476 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d06abe7d-735c-46b1-b98a-f7ef020fe863" containerName="oauth-openshift" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.843484 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d06abe7d-735c-46b1-b98a-f7ef020fe863" containerName="oauth-openshift" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.843674 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c270f61-528f-4ab0-a8a9-46efc3c85b3a" containerName="installer" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.843693 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d06abe7d-735c-46b1-b98a-f7ef020fe863" containerName="oauth-openshift" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.844237 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.848883 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.848899 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.849618 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.849695 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.850051 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.850163 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.850550 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.850761 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.850801 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.850921 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.851063 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 21 
17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.852139 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.861122 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6fffd54687-w8xsj"] Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.864437 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.874245 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.882514 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.909078 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.909491 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e78b96f7-13af-48ec-8278-54f1b3f777ee-audit-policies\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.909602 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.909777 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsjck\" (UniqueName: \"kubernetes.io/projected/e78b96f7-13af-48ec-8278-54f1b3f777ee-kube-api-access-xsjck\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.909890 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.909993 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.910085 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.910224 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-router-certs\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.910331 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-service-ca\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.910437 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e78b96f7-13af-48ec-8278-54f1b3f777ee-audit-dir\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.910516 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-user-template-error\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.910597 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.910691 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-user-template-login\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:08 crc kubenswrapper[4799]: I0121 17:38:08.910816 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-session\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.012489 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e78b96f7-13af-48ec-8278-54f1b3f777ee-audit-policies\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.012562 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.012608 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsjck\" (UniqueName: \"kubernetes.io/projected/e78b96f7-13af-48ec-8278-54f1b3f777ee-kube-api-access-xsjck\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.012638 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.012665 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.012689 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.012709 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-router-certs\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.012747 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-service-ca\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.012773 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e78b96f7-13af-48ec-8278-54f1b3f777ee-audit-dir\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.012794 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-user-template-error\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.012848 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.012875 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-user-template-login\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.012919 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-session\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.012958 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.015438 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e78b96f7-13af-48ec-8278-54f1b3f777ee-audit-policies\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.016363 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.017359 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e78b96f7-13af-48ec-8278-54f1b3f777ee-audit-dir\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.017501 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.017939 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-service-ca\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.020796 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.020849 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-user-template-error\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.021548 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-session\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.021704 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.023415 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.024291 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.027257 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-system-router-certs\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.029738 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e78b96f7-13af-48ec-8278-54f1b3f777ee-v4-0-config-user-template-login\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.034047 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsjck\" (UniqueName: \"kubernetes.io/projected/e78b96f7-13af-48ec-8278-54f1b3f777ee-kube-api-access-xsjck\") pod \"oauth-openshift-6fffd54687-w8xsj\" (UID: \"e78b96f7-13af-48ec-8278-54f1b3f777ee\") " pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.163425 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.324198 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.329694 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.444853 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.445665 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6fffd54687-w8xsj"] Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.448414 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.486474 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.672017 4799 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.672432 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://6715a04ffa90fa4177947f9fc9bd9bafec3975cd673149c80ad5350fbfd90db4" gracePeriod=5 Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.677263 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.731924 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.764967 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 21 17:38:09 crc kubenswrapper[4799]: I0121 17:38:09.798893 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.075466 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.076891 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.206093 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.231390 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.293380 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.330469 4799 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.335782 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.392374 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.424793 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.429974 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" event={"ID":"e78b96f7-13af-48ec-8278-54f1b3f777ee","Type":"ContainerStarted","Data":"c377115b68a7679fc9024e71ccb2c0fcd7ae23204f3f52b791a7d0f577350dc0"} Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.430029 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" event={"ID":"e78b96f7-13af-48ec-8278-54f1b3f777ee","Type":"ContainerStarted","Data":"4f8e3a3ad9c05796ebd7417cfece0d6042bb88d6ef1b93b074910f81876c7b8d"} Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.430296 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.455882 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" podStartSLOduration=61.455857467 podStartE2EDuration="1m1.455857467s" podCreationTimestamp="2026-01-21 17:37:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:38:10.453744574 +0000 UTC m=+317.080034617" watchObservedRunningTime="2026-01-21 17:38:10.455857467 +0000 UTC m=+317.082147490" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.468849 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.550355 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.637277 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6fffd54687-w8xsj" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.724882 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.801771 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.804118 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.838663 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.916099 4799 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 21 17:38:10 crc kubenswrapper[4799]: I0121 17:38:10.995192 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 21 17:38:11 crc kubenswrapper[4799]: I0121 17:38:11.016674 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 21 17:38:11 crc kubenswrapper[4799]: I0121 17:38:11.068441 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 21 17:38:11 crc kubenswrapper[4799]: I0121 17:38:11.147946 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 21 17:38:11 crc kubenswrapper[4799]: I0121 17:38:11.358851 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 21 17:38:11 crc kubenswrapper[4799]: I0121 17:38:11.390712 4799 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 21 17:38:11 crc kubenswrapper[4799]: I0121 17:38:11.459000 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 21 17:38:11 crc kubenswrapper[4799]: I0121 17:38:11.504526 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 21 17:38:11 crc kubenswrapper[4799]: I0121 17:38:11.588250 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 21 17:38:11 crc kubenswrapper[4799]: I0121 17:38:11.595099 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 21 17:38:11 crc kubenswrapper[4799]: I0121 17:38:11.638429 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 21 17:38:11 crc kubenswrapper[4799]: I0121 17:38:11.817333 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 21 17:38:11 crc kubenswrapper[4799]: I0121 17:38:11.823285 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 21 17:38:11 crc kubenswrapper[4799]: I0121 17:38:11.922732 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 21 17:38:11 crc kubenswrapper[4799]: I0121 17:38:11.955957 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 21 17:38:12 crc kubenswrapper[4799]: I0121 17:38:12.038750 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 21 17:38:12 crc kubenswrapper[4799]: I0121 17:38:12.054220 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 21 17:38:12 crc kubenswrapper[4799]: I0121 17:38:12.138316 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 21 17:38:12 crc kubenswrapper[4799]: I0121 17:38:12.162371 4799 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 21 17:38:12 crc kubenswrapper[4799]: I0121 17:38:12.364650 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 21 17:38:12 crc kubenswrapper[4799]: I0121 17:38:12.374037 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 21 17:38:12 crc kubenswrapper[4799]: I0121 17:38:12.479115 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 21 17:38:12 crc kubenswrapper[4799]: I0121 17:38:12.580687 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 21 17:38:12 crc kubenswrapper[4799]: I0121 17:38:12.917086 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 21 17:38:13 crc kubenswrapper[4799]: I0121 17:38:13.015508 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 21 17:38:13 crc kubenswrapper[4799]: I0121 17:38:13.120707 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 21 17:38:13 crc kubenswrapper[4799]: I0121 17:38:13.193287 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 21 17:38:13 crc kubenswrapper[4799]: I0121 17:38:13.250153 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 21 17:38:13 crc kubenswrapper[4799]: I0121 17:38:13.661239 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 21 17:38:14 crc kubenswrapper[4799]: I0121 17:38:14.549971 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.249392 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.249498 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.381521 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.381674 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.381721 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.381746 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.381773 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.382706 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.382798 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.382833 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.382867 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.393413 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.460237 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.460319 4799 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="6715a04ffa90fa4177947f9fc9bd9bafec3975cd673149c80ad5350fbfd90db4" exitCode=137 Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.460425 4799 scope.go:117] "RemoveContainer" containerID="6715a04ffa90fa4177947f9fc9bd9bafec3975cd673149c80ad5350fbfd90db4" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.460650 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.482890 4799 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.484069 4799 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.484162 4799 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.484357 4799 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.484531 4799 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.484639 4799 scope.go:117] "RemoveContainer" containerID="6715a04ffa90fa4177947f9fc9bd9bafec3975cd673149c80ad5350fbfd90db4" Jan 21 17:38:15 crc kubenswrapper[4799]: E0121 17:38:15.485612 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6715a04ffa90fa4177947f9fc9bd9bafec3975cd673149c80ad5350fbfd90db4\": container with ID starting with 6715a04ffa90fa4177947f9fc9bd9bafec3975cd673149c80ad5350fbfd90db4 not found: ID does not exist" containerID="6715a04ffa90fa4177947f9fc9bd9bafec3975cd673149c80ad5350fbfd90db4" Jan 21 17:38:15 crc kubenswrapper[4799]: I0121 17:38:15.485733 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6715a04ffa90fa4177947f9fc9bd9bafec3975cd673149c80ad5350fbfd90db4"} err="failed to 
get container status \"6715a04ffa90fa4177947f9fc9bd9bafec3975cd673149c80ad5350fbfd90db4\": rpc error: code = NotFound desc = could not find container \"6715a04ffa90fa4177947f9fc9bd9bafec3975cd673149c80ad5350fbfd90db4\": container with ID starting with 6715a04ffa90fa4177947f9fc9bd9bafec3975cd673149c80ad5350fbfd90db4 not found: ID does not exist" Jan 21 17:38:16 crc kubenswrapper[4799]: I0121 17:38:16.212228 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 21 17:38:35 crc kubenswrapper[4799]: I0121 17:38:35.632424 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wr5c4"] Jan 21 17:38:35 crc kubenswrapper[4799]: I0121 17:38:35.633410 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" podUID="e45b169d-862b-4326-a005-063cead60ac4" containerName="controller-manager" containerID="cri-o://a5fe3335ba15b075b24ff8e9dc89c954cf198f86cd90c4d25b200e10413d6bfe" gracePeriod=30 Jan 21 17:38:35 crc kubenswrapper[4799]: I0121 17:38:35.733717 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj"] Jan 21 17:38:35 crc kubenswrapper[4799]: I0121 17:38:35.734038 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" podUID="9a6e3985-ef4a-451a-90cf-4b313527298c" containerName="route-controller-manager" containerID="cri-o://0f1c18ab009fa3ca61cdd425023b76d4bf4daeb6727c73153354a1032db2c838" gracePeriod=30 Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.150386 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.192712 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.266524 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e45b169d-862b-4326-a005-063cead60ac4-serving-cert\") pod \"e45b169d-862b-4326-a005-063cead60ac4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.266610 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-config\") pod \"e45b169d-862b-4326-a005-063cead60ac4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.266639 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-client-ca\") pod \"e45b169d-862b-4326-a005-063cead60ac4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.266690 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8k4l\" (UniqueName: \"kubernetes.io/projected/e45b169d-862b-4326-a005-063cead60ac4-kube-api-access-q8k4l\") pod \"e45b169d-862b-4326-a005-063cead60ac4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.266805 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-proxy-ca-bundles\") pod \"e45b169d-862b-4326-a005-063cead60ac4\" (UID: \"e45b169d-862b-4326-a005-063cead60ac4\") " Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.268363 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-client-ca" (OuterVolumeSpecName: "client-ca") pod "e45b169d-862b-4326-a005-063cead60ac4" (UID: "e45b169d-862b-4326-a005-063cead60ac4"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.268557 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-config" (OuterVolumeSpecName: "config") pod "e45b169d-862b-4326-a005-063cead60ac4" (UID: "e45b169d-862b-4326-a005-063cead60ac4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.268894 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "e45b169d-862b-4326-a005-063cead60ac4" (UID: "e45b169d-862b-4326-a005-063cead60ac4"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.274380 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e45b169d-862b-4326-a005-063cead60ac4-kube-api-access-q8k4l" (OuterVolumeSpecName: "kube-api-access-q8k4l") pod "e45b169d-862b-4326-a005-063cead60ac4" (UID: "e45b169d-862b-4326-a005-063cead60ac4"). 
InnerVolumeSpecName "kube-api-access-q8k4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.274713 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e45b169d-862b-4326-a005-063cead60ac4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e45b169d-862b-4326-a005-063cead60ac4" (UID: "e45b169d-862b-4326-a005-063cead60ac4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.369538 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7m27v\" (UniqueName: \"kubernetes.io/projected/9a6e3985-ef4a-451a-90cf-4b313527298c-kube-api-access-7m27v\") pod \"9a6e3985-ef4a-451a-90cf-4b313527298c\" (UID: \"9a6e3985-ef4a-451a-90cf-4b313527298c\") " Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.369761 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9a6e3985-ef4a-451a-90cf-4b313527298c-client-ca\") pod \"9a6e3985-ef4a-451a-90cf-4b313527298c\" (UID: \"9a6e3985-ef4a-451a-90cf-4b313527298c\") " Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.369921 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a6e3985-ef4a-451a-90cf-4b313527298c-serving-cert\") pod \"9a6e3985-ef4a-451a-90cf-4b313527298c\" (UID: \"9a6e3985-ef4a-451a-90cf-4b313527298c\") " Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.370004 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a6e3985-ef4a-451a-90cf-4b313527298c-config\") pod \"9a6e3985-ef4a-451a-90cf-4b313527298c\" (UID: \"9a6e3985-ef4a-451a-90cf-4b313527298c\") " Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.370450 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.370487 4799 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.370506 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8k4l\" (UniqueName: \"kubernetes.io/projected/e45b169d-862b-4326-a005-063cead60ac4-kube-api-access-q8k4l\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.370524 4799 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e45b169d-862b-4326-a005-063cead60ac4-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.370543 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e45b169d-862b-4326-a005-063cead60ac4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.371476 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a6e3985-ef4a-451a-90cf-4b313527298c-client-ca" (OuterVolumeSpecName: "client-ca") pod "9a6e3985-ef4a-451a-90cf-4b313527298c" (UID: 
"9a6e3985-ef4a-451a-90cf-4b313527298c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.371585 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a6e3985-ef4a-451a-90cf-4b313527298c-config" (OuterVolumeSpecName: "config") pod "9a6e3985-ef4a-451a-90cf-4b313527298c" (UID: "9a6e3985-ef4a-451a-90cf-4b313527298c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.374200 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a6e3985-ef4a-451a-90cf-4b313527298c-kube-api-access-7m27v" (OuterVolumeSpecName: "kube-api-access-7m27v") pod "9a6e3985-ef4a-451a-90cf-4b313527298c" (UID: "9a6e3985-ef4a-451a-90cf-4b313527298c"). InnerVolumeSpecName "kube-api-access-7m27v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.374739 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a6e3985-ef4a-451a-90cf-4b313527298c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9a6e3985-ef4a-451a-90cf-4b313527298c" (UID: "9a6e3985-ef4a-451a-90cf-4b313527298c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.471506 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a6e3985-ef4a-451a-90cf-4b313527298c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.471551 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a6e3985-ef4a-451a-90cf-4b313527298c-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.471563 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7m27v\" (UniqueName: \"kubernetes.io/projected/9a6e3985-ef4a-451a-90cf-4b313527298c-kube-api-access-7m27v\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.471574 4799 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9a6e3985-ef4a-451a-90cf-4b313527298c-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.592935 4799 generic.go:334] "Generic (PLEG): container finished" podID="9a6e3985-ef4a-451a-90cf-4b313527298c" containerID="0f1c18ab009fa3ca61cdd425023b76d4bf4daeb6727c73153354a1032db2c838" exitCode=0 Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.593067 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" event={"ID":"9a6e3985-ef4a-451a-90cf-4b313527298c","Type":"ContainerDied","Data":"0f1c18ab009fa3ca61cdd425023b76d4bf4daeb6727c73153354a1032db2c838"} Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.593097 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.593206 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj" event={"ID":"9a6e3985-ef4a-451a-90cf-4b313527298c","Type":"ContainerDied","Data":"a6519b631dfef1b7bd0eab44a0fa56980796090daf66a2b25172289a90e1d851"} Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.593244 4799 scope.go:117] "RemoveContainer" containerID="0f1c18ab009fa3ca61cdd425023b76d4bf4daeb6727c73153354a1032db2c838" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.596693 4799 generic.go:334] "Generic (PLEG): container finished" podID="e45b169d-862b-4326-a005-063cead60ac4" containerID="a5fe3335ba15b075b24ff8e9dc89c954cf198f86cd90c4d25b200e10413d6bfe" exitCode=0 Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.596763 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" event={"ID":"e45b169d-862b-4326-a005-063cead60ac4","Type":"ContainerDied","Data":"a5fe3335ba15b075b24ff8e9dc89c954cf198f86cd90c4d25b200e10413d6bfe"} Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.596777 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.596808 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-wr5c4" event={"ID":"e45b169d-862b-4326-a005-063cead60ac4","Type":"ContainerDied","Data":"e434a7b39b86229a24a1e5ad0b85ce42f00e1748bc040e4be64cb04e2b2881e3"} Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.613256 4799 scope.go:117] "RemoveContainer" containerID="0f1c18ab009fa3ca61cdd425023b76d4bf4daeb6727c73153354a1032db2c838" Jan 21 17:38:36 crc kubenswrapper[4799]: E0121 17:38:36.613887 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f1c18ab009fa3ca61cdd425023b76d4bf4daeb6727c73153354a1032db2c838\": container with ID starting with 0f1c18ab009fa3ca61cdd425023b76d4bf4daeb6727c73153354a1032db2c838 not found: ID does not exist" containerID="0f1c18ab009fa3ca61cdd425023b76d4bf4daeb6727c73153354a1032db2c838" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.613925 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f1c18ab009fa3ca61cdd425023b76d4bf4daeb6727c73153354a1032db2c838"} err="failed to get container status \"0f1c18ab009fa3ca61cdd425023b76d4bf4daeb6727c73153354a1032db2c838\": rpc error: code = NotFound desc = could not find container \"0f1c18ab009fa3ca61cdd425023b76d4bf4daeb6727c73153354a1032db2c838\": container with ID starting with 0f1c18ab009fa3ca61cdd425023b76d4bf4daeb6727c73153354a1032db2c838 not found: ID does not exist" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.613954 4799 scope.go:117] "RemoveContainer" containerID="a5fe3335ba15b075b24ff8e9dc89c954cf198f86cd90c4d25b200e10413d6bfe" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.630955 4799 scope.go:117] "RemoveContainer" containerID="a5fe3335ba15b075b24ff8e9dc89c954cf198f86cd90c4d25b200e10413d6bfe" Jan 21 17:38:36 crc kubenswrapper[4799]: E0121 17:38:36.632093 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not 
find container \"a5fe3335ba15b075b24ff8e9dc89c954cf198f86cd90c4d25b200e10413d6bfe\": container with ID starting with a5fe3335ba15b075b24ff8e9dc89c954cf198f86cd90c4d25b200e10413d6bfe not found: ID does not exist" containerID="a5fe3335ba15b075b24ff8e9dc89c954cf198f86cd90c4d25b200e10413d6bfe" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.632182 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5fe3335ba15b075b24ff8e9dc89c954cf198f86cd90c4d25b200e10413d6bfe"} err="failed to get container status \"a5fe3335ba15b075b24ff8e9dc89c954cf198f86cd90c4d25b200e10413d6bfe\": rpc error: code = NotFound desc = could not find container \"a5fe3335ba15b075b24ff8e9dc89c954cf198f86cd90c4d25b200e10413d6bfe\": container with ID starting with a5fe3335ba15b075b24ff8e9dc89c954cf198f86cd90c4d25b200e10413d6bfe not found: ID does not exist" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.638886 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wr5c4"] Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.643840 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-wr5c4"] Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.657251 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj"] Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.661002 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-59kzj"] Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.832973 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5789f55649-mp9hw"] Jan 21 17:38:36 crc kubenswrapper[4799]: E0121 17:38:36.833591 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a6e3985-ef4a-451a-90cf-4b313527298c" containerName="route-controller-manager" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.833610 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a6e3985-ef4a-451a-90cf-4b313527298c" containerName="route-controller-manager" Jan 21 17:38:36 crc kubenswrapper[4799]: E0121 17:38:36.833633 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.833640 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 21 17:38:36 crc kubenswrapper[4799]: E0121 17:38:36.833681 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e45b169d-862b-4326-a005-063cead60ac4" containerName="controller-manager" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.833691 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e45b169d-862b-4326-a005-063cead60ac4" containerName="controller-manager" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.833876 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.835887 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a6e3985-ef4a-451a-90cf-4b313527298c" containerName="route-controller-manager" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.835912 4799 
Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.838244 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw"
Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.847493 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.847567 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.847502 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.848765 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk"]
Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.850605 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.850721 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.850893 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.852059 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.863148 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5789f55649-mp9hw"]
Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.863201 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk"]
Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.863245 4799 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.868217 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.868417 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.868423 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.868236 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.870152 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.870930 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.990272 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s49q7\" (UniqueName: \"kubernetes.io/projected/f674d924-7ddd-48f5-bd9c-cb8f26b78866-kube-api-access-s49q7\") pod \"route-controller-manager-7d94f48599-tkjtk\" (UID: \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\") " pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.990340 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js29h\" (UniqueName: \"kubernetes.io/projected/c3326aec-578a-4248-824c-3ed5010c5a47-kube-api-access-js29h\") pod \"controller-manager-5789f55649-mp9hw\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.990569 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f674d924-7ddd-48f5-bd9c-cb8f26b78866-serving-cert\") pod \"route-controller-manager-7d94f48599-tkjtk\" (UID: \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\") " pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.990624 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-client-ca\") pod \"controller-manager-5789f55649-mp9hw\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.990659 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-proxy-ca-bundles\") pod \"controller-manager-5789f55649-mp9hw\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 
17:38:36.990708 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3326aec-578a-4248-824c-3ed5010c5a47-serving-cert\") pod \"controller-manager-5789f55649-mp9hw\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.990742 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f674d924-7ddd-48f5-bd9c-cb8f26b78866-client-ca\") pod \"route-controller-manager-7d94f48599-tkjtk\" (UID: \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\") " pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.990857 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-config\") pod \"controller-manager-5789f55649-mp9hw\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:36 crc kubenswrapper[4799]: I0121 17:38:36.990881 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f674d924-7ddd-48f5-bd9c-cb8f26b78866-config\") pod \"route-controller-manager-7d94f48599-tkjtk\" (UID: \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\") " pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.091980 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js29h\" (UniqueName: \"kubernetes.io/projected/c3326aec-578a-4248-824c-3ed5010c5a47-kube-api-access-js29h\") pod \"controller-manager-5789f55649-mp9hw\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.092071 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f674d924-7ddd-48f5-bd9c-cb8f26b78866-serving-cert\") pod \"route-controller-manager-7d94f48599-tkjtk\" (UID: \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\") " pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.092106 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-client-ca\") pod \"controller-manager-5789f55649-mp9hw\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.092157 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-proxy-ca-bundles\") pod \"controller-manager-5789f55649-mp9hw\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.092191 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3326aec-578a-4248-824c-3ed5010c5a47-serving-cert\") pod \"controller-manager-5789f55649-mp9hw\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.092217 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f674d924-7ddd-48f5-bd9c-cb8f26b78866-client-ca\") pod \"route-controller-manager-7d94f48599-tkjtk\" (UID: \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\") " pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.092269 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-config\") pod \"controller-manager-5789f55649-mp9hw\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.092301 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f674d924-7ddd-48f5-bd9c-cb8f26b78866-config\") pod \"route-controller-manager-7d94f48599-tkjtk\" (UID: \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\") " pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.092333 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s49q7\" (UniqueName: \"kubernetes.io/projected/f674d924-7ddd-48f5-bd9c-cb8f26b78866-kube-api-access-s49q7\") pod \"route-controller-manager-7d94f48599-tkjtk\" (UID: \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\") " pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.094473 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f674d924-7ddd-48f5-bd9c-cb8f26b78866-client-ca\") pod \"route-controller-manager-7d94f48599-tkjtk\" (UID: \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\") " pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.094619 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-client-ca\") pod \"controller-manager-5789f55649-mp9hw\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.094824 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f674d924-7ddd-48f5-bd9c-cb8f26b78866-config\") pod \"route-controller-manager-7d94f48599-tkjtk\" (UID: \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\") " pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.095307 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-config\") pod \"controller-manager-5789f55649-mp9hw\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " 
pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.095353 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-proxy-ca-bundles\") pod \"controller-manager-5789f55649-mp9hw\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.096828 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f674d924-7ddd-48f5-bd9c-cb8f26b78866-serving-cert\") pod \"route-controller-manager-7d94f48599-tkjtk\" (UID: \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\") " pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.099154 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3326aec-578a-4248-824c-3ed5010c5a47-serving-cert\") pod \"controller-manager-5789f55649-mp9hw\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.110903 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js29h\" (UniqueName: \"kubernetes.io/projected/c3326aec-578a-4248-824c-3ed5010c5a47-kube-api-access-js29h\") pod \"controller-manager-5789f55649-mp9hw\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.122141 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s49q7\" (UniqueName: \"kubernetes.io/projected/f674d924-7ddd-48f5-bd9c-cb8f26b78866-kube-api-access-s49q7\") pod \"route-controller-manager-7d94f48599-tkjtk\" (UID: \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\") " pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.170006 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.189920 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.381846 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5789f55649-mp9hw"] Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.439880 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk"] Jan 21 17:38:37 crc kubenswrapper[4799]: W0121 17:38:37.457164 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf674d924_7ddd_48f5_bd9c_cb8f26b78866.slice/crio-2f10bfdb12e1d3cf80d5204c13b9203cfe2690cf200b1c4a4e78aef597b8ed4a WatchSource:0}: Error finding container 2f10bfdb12e1d3cf80d5204c13b9203cfe2690cf200b1c4a4e78aef597b8ed4a: Status 404 returned error can't find the container with id 2f10bfdb12e1d3cf80d5204c13b9203cfe2690cf200b1c4a4e78aef597b8ed4a Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.530619 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5789f55649-mp9hw"] Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.542460 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk"] Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.604935 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" event={"ID":"f674d924-7ddd-48f5-bd9c-cb8f26b78866","Type":"ContainerStarted","Data":"2f10bfdb12e1d3cf80d5204c13b9203cfe2690cf200b1c4a4e78aef597b8ed4a"} Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.610342 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" event={"ID":"c3326aec-578a-4248-824c-3ed5010c5a47","Type":"ContainerStarted","Data":"df4a52bc14742ba09887c168f5bd23dc3189aa8d3a029ea72a51974e1d72156f"} Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.610403 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" event={"ID":"c3326aec-578a-4248-824c-3ed5010c5a47","Type":"ContainerStarted","Data":"5b69f1daa08d48877a1eca7deeb989a5db80816a2f90cfa7cf1f57a01219491a"} Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.610539 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" podUID="c3326aec-578a-4248-824c-3ed5010c5a47" containerName="controller-manager" containerID="cri-o://df4a52bc14742ba09887c168f5bd23dc3189aa8d3a029ea72a51974e1d72156f" gracePeriod=30 Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.611153 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.612290 4799 patch_prober.go:28] interesting pod/controller-manager-5789f55649-mp9hw container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.63:8443/healthz\": dial tcp 10.217.0.63:8443: connect: connection refused" start-of-body= Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.612363 4799 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" podUID="c3326aec-578a-4248-824c-3ed5010c5a47" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.63:8443/healthz\": dial tcp 10.217.0.63:8443: connect: connection refused" Jan 21 17:38:37 crc kubenswrapper[4799]: I0121 17:38:37.635486 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" podStartSLOduration=2.635459959 podStartE2EDuration="2.635459959s" podCreationTimestamp="2026-01-21 17:38:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:38:37.631993445 +0000 UTC m=+344.258283478" watchObservedRunningTime="2026-01-21 17:38:37.635459959 +0000 UTC m=+344.261749972" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.001562 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-controller-manager_controller-manager-5789f55649-mp9hw_c3326aec-578a-4248-824c-3ed5010c5a47/controller-manager/0.log" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.001661 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.110977 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3326aec-578a-4248-824c-3ed5010c5a47-serving-cert\") pod \"c3326aec-578a-4248-824c-3ed5010c5a47\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.111261 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-proxy-ca-bundles\") pod \"c3326aec-578a-4248-824c-3ed5010c5a47\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.111302 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-client-ca\") pod \"c3326aec-578a-4248-824c-3ed5010c5a47\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.111355 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js29h\" (UniqueName: \"kubernetes.io/projected/c3326aec-578a-4248-824c-3ed5010c5a47-kube-api-access-js29h\") pod \"c3326aec-578a-4248-824c-3ed5010c5a47\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.111392 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-config\") pod \"c3326aec-578a-4248-824c-3ed5010c5a47\" (UID: \"c3326aec-578a-4248-824c-3ed5010c5a47\") " Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.112300 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-client-ca" (OuterVolumeSpecName: "client-ca") pod "c3326aec-578a-4248-824c-3ed5010c5a47" (UID: "c3326aec-578a-4248-824c-3ed5010c5a47"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.112393 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c3326aec-578a-4248-824c-3ed5010c5a47" (UID: "c3326aec-578a-4248-824c-3ed5010c5a47"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.112433 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-config" (OuterVolumeSpecName: "config") pod "c3326aec-578a-4248-824c-3ed5010c5a47" (UID: "c3326aec-578a-4248-824c-3ed5010c5a47"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.120084 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3326aec-578a-4248-824c-3ed5010c5a47-kube-api-access-js29h" (OuterVolumeSpecName: "kube-api-access-js29h") pod "c3326aec-578a-4248-824c-3ed5010c5a47" (UID: "c3326aec-578a-4248-824c-3ed5010c5a47"). InnerVolumeSpecName "kube-api-access-js29h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.121893 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3326aec-578a-4248-824c-3ed5010c5a47-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c3326aec-578a-4248-824c-3ed5010c5a47" (UID: "c3326aec-578a-4248-824c-3ed5010c5a47"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.213056 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a6e3985-ef4a-451a-90cf-4b313527298c" path="/var/lib/kubelet/pods/9a6e3985-ef4a-451a-90cf-4b313527298c/volumes" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.213871 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e45b169d-862b-4326-a005-063cead60ac4" path="/var/lib/kubelet/pods/e45b169d-862b-4326-a005-063cead60ac4/volumes" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.216067 4799 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.216108 4799 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.216120 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js29h\" (UniqueName: \"kubernetes.io/projected/c3326aec-578a-4248-824c-3ed5010c5a47-kube-api-access-js29h\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.216154 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3326aec-578a-4248-824c-3ed5010c5a47-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.216164 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/c3326aec-578a-4248-824c-3ed5010c5a47-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.621784 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-controller-manager_controller-manager-5789f55649-mp9hw_c3326aec-578a-4248-824c-3ed5010c5a47/controller-manager/0.log" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.621846 4799 generic.go:334] "Generic (PLEG): container finished" podID="c3326aec-578a-4248-824c-3ed5010c5a47" containerID="df4a52bc14742ba09887c168f5bd23dc3189aa8d3a029ea72a51974e1d72156f" exitCode=2 Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.621921 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" event={"ID":"c3326aec-578a-4248-824c-3ed5010c5a47","Type":"ContainerDied","Data":"df4a52bc14742ba09887c168f5bd23dc3189aa8d3a029ea72a51974e1d72156f"} Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.621926 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.621957 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5789f55649-mp9hw" event={"ID":"c3326aec-578a-4248-824c-3ed5010c5a47","Type":"ContainerDied","Data":"5b69f1daa08d48877a1eca7deeb989a5db80816a2f90cfa7cf1f57a01219491a"} Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.621985 4799 scope.go:117] "RemoveContainer" containerID="df4a52bc14742ba09887c168f5bd23dc3189aa8d3a029ea72a51974e1d72156f" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.623280 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" event={"ID":"f674d924-7ddd-48f5-bd9c-cb8f26b78866","Type":"ContainerStarted","Data":"6c28a48e7611bd9b6f3a3160845fac134f8e288d3c7263a1095ba3ba38fe96a6"} Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.623521 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" podUID="f674d924-7ddd-48f5-bd9c-cb8f26b78866" containerName="route-controller-manager" containerID="cri-o://6c28a48e7611bd9b6f3a3160845fac134f8e288d3c7263a1095ba3ba38fe96a6" gracePeriod=30 Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.623793 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.630267 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.650311 4799 scope.go:117] "RemoveContainer" containerID="df4a52bc14742ba09887c168f5bd23dc3189aa8d3a029ea72a51974e1d72156f" Jan 21 17:38:38 crc kubenswrapper[4799]: E0121 17:38:38.652503 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df4a52bc14742ba09887c168f5bd23dc3189aa8d3a029ea72a51974e1d72156f\": container with ID starting with df4a52bc14742ba09887c168f5bd23dc3189aa8d3a029ea72a51974e1d72156f not found: ID does not exist" containerID="df4a52bc14742ba09887c168f5bd23dc3189aa8d3a029ea72a51974e1d72156f" Jan 21 17:38:38 crc 
Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.654249 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" podStartSLOduration=3.654223557 podStartE2EDuration="3.654223557s" podCreationTimestamp="2026-01-21 17:38:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:38:38.652616708 +0000 UTC m=+345.278906751" watchObservedRunningTime="2026-01-21 17:38:38.654223557 +0000 UTC m=+345.280513580" Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.685529 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5789f55649-mp9hw"] Jan 21 17:38:38 crc kubenswrapper[4799]: I0121 17:38:38.690316 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5789f55649-mp9hw"] Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.001411 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.025752 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f674d924-7ddd-48f5-bd9c-cb8f26b78866-client-ca\") pod \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\" (UID: \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\") " Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.025831 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f674d924-7ddd-48f5-bd9c-cb8f26b78866-config\") pod \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\" (UID: \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\") " Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.025861 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f674d924-7ddd-48f5-bd9c-cb8f26b78866-serving-cert\") pod \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\" (UID: \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\") " Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.025894 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s49q7\" (UniqueName: \"kubernetes.io/projected/f674d924-7ddd-48f5-bd9c-cb8f26b78866-kube-api-access-s49q7\") pod \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\" (UID: \"f674d924-7ddd-48f5-bd9c-cb8f26b78866\") " Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.027002 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f674d924-7ddd-48f5-bd9c-cb8f26b78866-config" (OuterVolumeSpecName: "config") pod "f674d924-7ddd-48f5-bd9c-cb8f26b78866" (UID: "f674d924-7ddd-48f5-bd9c-cb8f26b78866"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.027091 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f674d924-7ddd-48f5-bd9c-cb8f26b78866-client-ca" (OuterVolumeSpecName: "client-ca") pod "f674d924-7ddd-48f5-bd9c-cb8f26b78866" (UID: "f674d924-7ddd-48f5-bd9c-cb8f26b78866"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.030992 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f674d924-7ddd-48f5-bd9c-cb8f26b78866-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f674d924-7ddd-48f5-bd9c-cb8f26b78866" (UID: "f674d924-7ddd-48f5-bd9c-cb8f26b78866"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.031332 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f674d924-7ddd-48f5-bd9c-cb8f26b78866-kube-api-access-s49q7" (OuterVolumeSpecName: "kube-api-access-s49q7") pod "f674d924-7ddd-48f5-bd9c-cb8f26b78866" (UID: "f674d924-7ddd-48f5-bd9c-cb8f26b78866"). InnerVolumeSpecName "kube-api-access-s49q7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.130974 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f674d924-7ddd-48f5-bd9c-cb8f26b78866-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.131045 4799 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f674d924-7ddd-48f5-bd9c-cb8f26b78866-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.131060 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f674d924-7ddd-48f5-bd9c-cb8f26b78866-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.131076 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s49q7\" (UniqueName: \"kubernetes.io/projected/f674d924-7ddd-48f5-bd9c-cb8f26b78866-kube-api-access-s49q7\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.643258 4799 generic.go:334] "Generic (PLEG): container finished" podID="f674d924-7ddd-48f5-bd9c-cb8f26b78866" containerID="6c28a48e7611bd9b6f3a3160845fac134f8e288d3c7263a1095ba3ba38fe96a6" exitCode=0 Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.643683 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" event={"ID":"f674d924-7ddd-48f5-bd9c-cb8f26b78866","Type":"ContainerDied","Data":"6c28a48e7611bd9b6f3a3160845fac134f8e288d3c7263a1095ba3ba38fe96a6"} Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.643724 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" event={"ID":"f674d924-7ddd-48f5-bd9c-cb8f26b78866","Type":"ContainerDied","Data":"2f10bfdb12e1d3cf80d5204c13b9203cfe2690cf200b1c4a4e78aef597b8ed4a"} Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.643747 4799 scope.go:117] "RemoveContainer" 
containerID="6c28a48e7611bd9b6f3a3160845fac134f8e288d3c7263a1095ba3ba38fe96a6" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.643897 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.674038 4799 scope.go:117] "RemoveContainer" containerID="6c28a48e7611bd9b6f3a3160845fac134f8e288d3c7263a1095ba3ba38fe96a6" Jan 21 17:38:39 crc kubenswrapper[4799]: E0121 17:38:39.674830 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c28a48e7611bd9b6f3a3160845fac134f8e288d3c7263a1095ba3ba38fe96a6\": container with ID starting with 6c28a48e7611bd9b6f3a3160845fac134f8e288d3c7263a1095ba3ba38fe96a6 not found: ID does not exist" containerID="6c28a48e7611bd9b6f3a3160845fac134f8e288d3c7263a1095ba3ba38fe96a6" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.674875 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c28a48e7611bd9b6f3a3160845fac134f8e288d3c7263a1095ba3ba38fe96a6"} err="failed to get container status \"6c28a48e7611bd9b6f3a3160845fac134f8e288d3c7263a1095ba3ba38fe96a6\": rpc error: code = NotFound desc = could not find container \"6c28a48e7611bd9b6f3a3160845fac134f8e288d3c7263a1095ba3ba38fe96a6\": container with ID starting with 6c28a48e7611bd9b6f3a3160845fac134f8e288d3c7263a1095ba3ba38fe96a6 not found: ID does not exist" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.679906 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk"] Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.684457 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d94f48599-tkjtk"] Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.886162 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj"] Jan 21 17:38:39 crc kubenswrapper[4799]: E0121 17:38:39.886687 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3326aec-578a-4248-824c-3ed5010c5a47" containerName="controller-manager" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.886724 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3326aec-578a-4248-824c-3ed5010c5a47" containerName="controller-manager" Jan 21 17:38:39 crc kubenswrapper[4799]: E0121 17:38:39.886751 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f674d924-7ddd-48f5-bd9c-cb8f26b78866" containerName="route-controller-manager" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.886761 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f674d924-7ddd-48f5-bd9c-cb8f26b78866" containerName="route-controller-manager" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.886979 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f674d924-7ddd-48f5-bd9c-cb8f26b78866" containerName="route-controller-manager" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.887003 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3326aec-578a-4248-824c-3ed5010c5a47" containerName="controller-manager" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.887845 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.890625 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-75b948c944-mpmxn"] Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.891585 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.895423 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.895513 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.895916 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.895999 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.896040 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.896146 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.896409 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.900664 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-75b948c944-mpmxn"] Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.910244 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj"] Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.911414 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.911762 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.911821 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.911811 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.912214 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 21 17:38:39 crc kubenswrapper[4799]: I0121 17:38:39.916469 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.084427 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/93e97188-de1b-4fc1-8edc-289980d9747a-serving-cert\") pod \"controller-manager-75b948c944-mpmxn\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.084901 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/857d4f73-fe31-40a0-9c01-79aa1c103b23-serving-cert\") pod \"route-controller-manager-5cb5d66878-269gj\" (UID: \"857d4f73-fe31-40a0-9c01-79aa1c103b23\") " pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.084964 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/857d4f73-fe31-40a0-9c01-79aa1c103b23-config\") pod \"route-controller-manager-5cb5d66878-269gj\" (UID: \"857d4f73-fe31-40a0-9c01-79aa1c103b23\") " pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.085120 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-client-ca\") pod \"controller-manager-75b948c944-mpmxn\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.085221 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-config\") pod \"controller-manager-75b948c944-mpmxn\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.085248 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-proxy-ca-bundles\") pod \"controller-manager-75b948c944-mpmxn\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.085280 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/857d4f73-fe31-40a0-9c01-79aa1c103b23-client-ca\") pod \"route-controller-manager-5cb5d66878-269gj\" (UID: \"857d4f73-fe31-40a0-9c01-79aa1c103b23\") " pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.085299 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqkxv\" (UniqueName: \"kubernetes.io/projected/93e97188-de1b-4fc1-8edc-289980d9747a-kube-api-access-pqkxv\") pod \"controller-manager-75b948c944-mpmxn\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.085321 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wr449\" (UniqueName: 
\"kubernetes.io/projected/857d4f73-fe31-40a0-9c01-79aa1c103b23-kube-api-access-wr449\") pod \"route-controller-manager-5cb5d66878-269gj\" (UID: \"857d4f73-fe31-40a0-9c01-79aa1c103b23\") " pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.185838 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-config\") pod \"controller-manager-75b948c944-mpmxn\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.185905 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-proxy-ca-bundles\") pod \"controller-manager-75b948c944-mpmxn\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.185929 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/857d4f73-fe31-40a0-9c01-79aa1c103b23-client-ca\") pod \"route-controller-manager-5cb5d66878-269gj\" (UID: \"857d4f73-fe31-40a0-9c01-79aa1c103b23\") " pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.185969 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqkxv\" (UniqueName: \"kubernetes.io/projected/93e97188-de1b-4fc1-8edc-289980d9747a-kube-api-access-pqkxv\") pod \"controller-manager-75b948c944-mpmxn\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.186000 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wr449\" (UniqueName: \"kubernetes.io/projected/857d4f73-fe31-40a0-9c01-79aa1c103b23-kube-api-access-wr449\") pod \"route-controller-manager-5cb5d66878-269gj\" (UID: \"857d4f73-fe31-40a0-9c01-79aa1c103b23\") " pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.186036 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93e97188-de1b-4fc1-8edc-289980d9747a-serving-cert\") pod \"controller-manager-75b948c944-mpmxn\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.186064 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/857d4f73-fe31-40a0-9c01-79aa1c103b23-serving-cert\") pod \"route-controller-manager-5cb5d66878-269gj\" (UID: \"857d4f73-fe31-40a0-9c01-79aa1c103b23\") " pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.186098 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/857d4f73-fe31-40a0-9c01-79aa1c103b23-config\") pod 
\"route-controller-manager-5cb5d66878-269gj\" (UID: \"857d4f73-fe31-40a0-9c01-79aa1c103b23\") " pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.186155 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-client-ca\") pod \"controller-manager-75b948c944-mpmxn\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.187235 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-client-ca\") pod \"controller-manager-75b948c944-mpmxn\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.187284 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/857d4f73-fe31-40a0-9c01-79aa1c103b23-client-ca\") pod \"route-controller-manager-5cb5d66878-269gj\" (UID: \"857d4f73-fe31-40a0-9c01-79aa1c103b23\") " pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.188119 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-config\") pod \"controller-manager-75b948c944-mpmxn\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.188168 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-proxy-ca-bundles\") pod \"controller-manager-75b948c944-mpmxn\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.188720 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/857d4f73-fe31-40a0-9c01-79aa1c103b23-config\") pod \"route-controller-manager-5cb5d66878-269gj\" (UID: \"857d4f73-fe31-40a0-9c01-79aa1c103b23\") " pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.196058 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93e97188-de1b-4fc1-8edc-289980d9747a-serving-cert\") pod \"controller-manager-75b948c944-mpmxn\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.196183 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/857d4f73-fe31-40a0-9c01-79aa1c103b23-serving-cert\") pod \"route-controller-manager-5cb5d66878-269gj\" (UID: \"857d4f73-fe31-40a0-9c01-79aa1c103b23\") " pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.207477 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wr449\" (UniqueName: \"kubernetes.io/projected/857d4f73-fe31-40a0-9c01-79aa1c103b23-kube-api-access-wr449\") pod \"route-controller-manager-5cb5d66878-269gj\" (UID: \"857d4f73-fe31-40a0-9c01-79aa1c103b23\") " pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.208619 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqkxv\" (UniqueName: \"kubernetes.io/projected/93e97188-de1b-4fc1-8edc-289980d9747a-kube-api-access-pqkxv\") pod \"controller-manager-75b948c944-mpmxn\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.215092 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3326aec-578a-4248-824c-3ed5010c5a47" path="/var/lib/kubelet/pods/c3326aec-578a-4248-824c-3ed5010c5a47/volumes" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.216039 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f674d924-7ddd-48f5-bd9c-cb8f26b78866" path="/var/lib/kubelet/pods/f674d924-7ddd-48f5-bd9c-cb8f26b78866/volumes" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.216772 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.230337 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.479067 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-75b948c944-mpmxn"] Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.506266 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj"] Jan 21 17:38:40 crc kubenswrapper[4799]: W0121 17:38:40.515826 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod857d4f73_fe31_40a0_9c01_79aa1c103b23.slice/crio-bbd571dfec50da1cf77d69701a2808f6eb88a388b56e12db2eb1fc0d116a9c27 WatchSource:0}: Error finding container bbd571dfec50da1cf77d69701a2808f6eb88a388b56e12db2eb1fc0d116a9c27: Status 404 returned error can't find the container with id bbd571dfec50da1cf77d69701a2808f6eb88a388b56e12db2eb1fc0d116a9c27 Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.662061 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" event={"ID":"857d4f73-fe31-40a0-9c01-79aa1c103b23","Type":"ContainerStarted","Data":"8b9f373f0eb4d712ddcd115818000626281c065437118238894465f58d15cb8f"} Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.662120 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" event={"ID":"857d4f73-fe31-40a0-9c01-79aa1c103b23","Type":"ContainerStarted","Data":"bbd571dfec50da1cf77d69701a2808f6eb88a388b56e12db2eb1fc0d116a9c27"} Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.662594 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.664459 4799 patch_prober.go:28] interesting pod/route-controller-manager-5cb5d66878-269gj container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.66:8443/healthz\": dial tcp 10.217.0.66:8443: connect: connection refused" start-of-body= Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.664559 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" podUID="857d4f73-fe31-40a0-9c01-79aa1c103b23" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.66:8443/healthz\": dial tcp 10.217.0.66:8443: connect: connection refused" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.673752 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" event={"ID":"93e97188-de1b-4fc1-8edc-289980d9747a","Type":"ContainerStarted","Data":"dfbfb86186941b0ad1c61786e26553b3ade5d1c5b132f18c577ad3075d958f76"} Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.674071 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" event={"ID":"93e97188-de1b-4fc1-8edc-289980d9747a","Type":"ContainerStarted","Data":"192cbb433572d36e3d70f0b8f021fdd1caefb4713428a35a80ff18b3651e5b09"} Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.674219 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.675664 4799 patch_prober.go:28] interesting pod/controller-manager-75b948c944-mpmxn container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.65:8443/healthz\": dial tcp 10.217.0.65:8443: connect: connection refused" start-of-body= Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.675727 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" podUID="93e97188-de1b-4fc1-8edc-289980d9747a" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.65:8443/healthz\": dial tcp 10.217.0.65:8443: connect: connection refused" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.693515 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" podStartSLOduration=3.693467086 podStartE2EDuration="3.693467086s" podCreationTimestamp="2026-01-21 17:38:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:38:40.684298849 +0000 UTC m=+347.310588882" watchObservedRunningTime="2026-01-21 17:38:40.693467086 +0000 UTC m=+347.319757109" Jan 21 17:38:40 crc kubenswrapper[4799]: I0121 17:38:40.713483 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" podStartSLOduration=3.713454488 podStartE2EDuration="3.713454488s" podCreationTimestamp="2026-01-21 17:38:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2026-01-21 17:38:40.709552021 +0000 UTC m=+347.335842044" watchObservedRunningTime="2026-01-21 17:38:40.713454488 +0000 UTC m=+347.339744511" Jan 21 17:38:42 crc kubenswrapper[4799]: I0121 17:38:41.690384 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:42 crc kubenswrapper[4799]: I0121 17:38:41.691708 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:38:55 crc kubenswrapper[4799]: I0121 17:38:55.630411 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-75b948c944-mpmxn"] Jan 21 17:38:55 crc kubenswrapper[4799]: I0121 17:38:55.631716 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" podUID="93e97188-de1b-4fc1-8edc-289980d9747a" containerName="controller-manager" containerID="cri-o://dfbfb86186941b0ad1c61786e26553b3ade5d1c5b132f18c577ad3075d958f76" gracePeriod=30 Jan 21 17:38:55 crc kubenswrapper[4799]: I0121 17:38:55.801985 4799 generic.go:334] "Generic (PLEG): container finished" podID="93e97188-de1b-4fc1-8edc-289980d9747a" containerID="dfbfb86186941b0ad1c61786e26553b3ade5d1c5b132f18c577ad3075d958f76" exitCode=0 Jan 21 17:38:55 crc kubenswrapper[4799]: I0121 17:38:55.802101 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" event={"ID":"93e97188-de1b-4fc1-8edc-289980d9747a","Type":"ContainerDied","Data":"dfbfb86186941b0ad1c61786e26553b3ade5d1c5b132f18c577ad3075d958f76"} Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.221080 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.400121 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqkxv\" (UniqueName: \"kubernetes.io/projected/93e97188-de1b-4fc1-8edc-289980d9747a-kube-api-access-pqkxv\") pod \"93e97188-de1b-4fc1-8edc-289980d9747a\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.400195 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-proxy-ca-bundles\") pod \"93e97188-de1b-4fc1-8edc-289980d9747a\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.400357 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-client-ca\") pod \"93e97188-de1b-4fc1-8edc-289980d9747a\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.400384 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93e97188-de1b-4fc1-8edc-289980d9747a-serving-cert\") pod \"93e97188-de1b-4fc1-8edc-289980d9747a\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.400444 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-config\") pod \"93e97188-de1b-4fc1-8edc-289980d9747a\" (UID: \"93e97188-de1b-4fc1-8edc-289980d9747a\") " Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.401355 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-client-ca" (OuterVolumeSpecName: "client-ca") pod "93e97188-de1b-4fc1-8edc-289980d9747a" (UID: "93e97188-de1b-4fc1-8edc-289980d9747a"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.401547 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-config" (OuterVolumeSpecName: "config") pod "93e97188-de1b-4fc1-8edc-289980d9747a" (UID: "93e97188-de1b-4fc1-8edc-289980d9747a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.401803 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "93e97188-de1b-4fc1-8edc-289980d9747a" (UID: "93e97188-de1b-4fc1-8edc-289980d9747a"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.407553 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93e97188-de1b-4fc1-8edc-289980d9747a-kube-api-access-pqkxv" (OuterVolumeSpecName: "kube-api-access-pqkxv") pod "93e97188-de1b-4fc1-8edc-289980d9747a" (UID: "93e97188-de1b-4fc1-8edc-289980d9747a"). InnerVolumeSpecName "kube-api-access-pqkxv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.408160 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93e97188-de1b-4fc1-8edc-289980d9747a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "93e97188-de1b-4fc1-8edc-289980d9747a" (UID: "93e97188-de1b-4fc1-8edc-289980d9747a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.502426 4799 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.502484 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93e97188-de1b-4fc1-8edc-289980d9747a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.502496 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.502507 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqkxv\" (UniqueName: \"kubernetes.io/projected/93e97188-de1b-4fc1-8edc-289980d9747a-kube-api-access-pqkxv\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.502527 4799 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93e97188-de1b-4fc1-8edc-289980d9747a-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.811928 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" event={"ID":"93e97188-de1b-4fc1-8edc-289980d9747a","Type":"ContainerDied","Data":"192cbb433572d36e3d70f0b8f021fdd1caefb4713428a35a80ff18b3651e5b09"} Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.812025 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-75b948c944-mpmxn" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.812034 4799 scope.go:117] "RemoveContainer" containerID="dfbfb86186941b0ad1c61786e26553b3ade5d1c5b132f18c577ad3075d958f76" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.848951 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-75b948c944-mpmxn"] Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.853357 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-558b484bbd-n8296"] Jan 21 17:38:56 crc kubenswrapper[4799]: E0121 17:38:56.853779 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93e97188-de1b-4fc1-8edc-289980d9747a" containerName="controller-manager" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.853809 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="93e97188-de1b-4fc1-8edc-289980d9747a" containerName="controller-manager" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.853981 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="93e97188-de1b-4fc1-8edc-289980d9747a" containerName="controller-manager" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.854574 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.857689 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-75b948c944-mpmxn"] Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.857915 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.857915 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.858168 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.858211 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.858938 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.867223 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-558b484bbd-n8296"] Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.869179 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.869536 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.910522 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfr42\" (UniqueName: \"kubernetes.io/projected/5e4d3240-6133-4fa6-8594-eaa9e6291661-kube-api-access-nfr42\") pod \"controller-manager-558b484bbd-n8296\" (UID: \"5e4d3240-6133-4fa6-8594-eaa9e6291661\") " 
pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.911167 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5e4d3240-6133-4fa6-8594-eaa9e6291661-client-ca\") pod \"controller-manager-558b484bbd-n8296\" (UID: \"5e4d3240-6133-4fa6-8594-eaa9e6291661\") " pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.911202 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e4d3240-6133-4fa6-8594-eaa9e6291661-config\") pod \"controller-manager-558b484bbd-n8296\" (UID: \"5e4d3240-6133-4fa6-8594-eaa9e6291661\") " pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.911282 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e4d3240-6133-4fa6-8594-eaa9e6291661-proxy-ca-bundles\") pod \"controller-manager-558b484bbd-n8296\" (UID: \"5e4d3240-6133-4fa6-8594-eaa9e6291661\") " pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:56 crc kubenswrapper[4799]: I0121 17:38:56.911437 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e4d3240-6133-4fa6-8594-eaa9e6291661-serving-cert\") pod \"controller-manager-558b484bbd-n8296\" (UID: \"5e4d3240-6133-4fa6-8594-eaa9e6291661\") " pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.013047 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5e4d3240-6133-4fa6-8594-eaa9e6291661-client-ca\") pod \"controller-manager-558b484bbd-n8296\" (UID: \"5e4d3240-6133-4fa6-8594-eaa9e6291661\") " pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.013142 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e4d3240-6133-4fa6-8594-eaa9e6291661-config\") pod \"controller-manager-558b484bbd-n8296\" (UID: \"5e4d3240-6133-4fa6-8594-eaa9e6291661\") " pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.013200 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e4d3240-6133-4fa6-8594-eaa9e6291661-proxy-ca-bundles\") pod \"controller-manager-558b484bbd-n8296\" (UID: \"5e4d3240-6133-4fa6-8594-eaa9e6291661\") " pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.013240 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e4d3240-6133-4fa6-8594-eaa9e6291661-serving-cert\") pod \"controller-manager-558b484bbd-n8296\" (UID: \"5e4d3240-6133-4fa6-8594-eaa9e6291661\") " pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.013297 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfr42\" (UniqueName: \"kubernetes.io/projected/5e4d3240-6133-4fa6-8594-eaa9e6291661-kube-api-access-nfr42\") pod \"controller-manager-558b484bbd-n8296\" (UID: \"5e4d3240-6133-4fa6-8594-eaa9e6291661\") " pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.014605 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e4d3240-6133-4fa6-8594-eaa9e6291661-proxy-ca-bundles\") pod \"controller-manager-558b484bbd-n8296\" (UID: \"5e4d3240-6133-4fa6-8594-eaa9e6291661\") " pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.014817 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5e4d3240-6133-4fa6-8594-eaa9e6291661-client-ca\") pod \"controller-manager-558b484bbd-n8296\" (UID: \"5e4d3240-6133-4fa6-8594-eaa9e6291661\") " pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.015096 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e4d3240-6133-4fa6-8594-eaa9e6291661-config\") pod \"controller-manager-558b484bbd-n8296\" (UID: \"5e4d3240-6133-4fa6-8594-eaa9e6291661\") " pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.019653 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e4d3240-6133-4fa6-8594-eaa9e6291661-serving-cert\") pod \"controller-manager-558b484bbd-n8296\" (UID: \"5e4d3240-6133-4fa6-8594-eaa9e6291661\") " pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.039349 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfr42\" (UniqueName: \"kubernetes.io/projected/5e4d3240-6133-4fa6-8594-eaa9e6291661-kube-api-access-nfr42\") pod \"controller-manager-558b484bbd-n8296\" (UID: \"5e4d3240-6133-4fa6-8594-eaa9e6291661\") " pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.176991 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.655869 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-558b484bbd-n8296"] Jan 21 17:38:57 crc kubenswrapper[4799]: W0121 17:38:57.665677 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5e4d3240_6133_4fa6_8594_eaa9e6291661.slice/crio-03afd941d07b5845ed12eb09b4038a516721f540477c830e28e404618eacc407 WatchSource:0}: Error finding container 03afd941d07b5845ed12eb09b4038a516721f540477c830e28e404618eacc407: Status 404 returned error can't find the container with id 03afd941d07b5845ed12eb09b4038a516721f540477c830e28e404618eacc407 Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.820236 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" event={"ID":"5e4d3240-6133-4fa6-8594-eaa9e6291661","Type":"ContainerStarted","Data":"6f9c6f4bf5a8677216f5cdb806c5d3bd8cd330ee9c20b6ee5349898fc0367f25"} Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.820694 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" event={"ID":"5e4d3240-6133-4fa6-8594-eaa9e6291661","Type":"ContainerStarted","Data":"03afd941d07b5845ed12eb09b4038a516721f540477c830e28e404618eacc407"} Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.820982 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.823517 4799 patch_prober.go:28] interesting pod/controller-manager-558b484bbd-n8296 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.67:8443/healthz\": dial tcp 10.217.0.67:8443: connect: connection refused" start-of-body= Jan 21 17:38:57 crc kubenswrapper[4799]: I0121 17:38:57.823589 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" podUID="5e4d3240-6133-4fa6-8594-eaa9e6291661" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.67:8443/healthz\": dial tcp 10.217.0.67:8443: connect: connection refused" Jan 21 17:38:58 crc kubenswrapper[4799]: I0121 17:38:58.212757 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93e97188-de1b-4fc1-8edc-289980d9747a" path="/var/lib/kubelet/pods/93e97188-de1b-4fc1-8edc-289980d9747a/volumes" Jan 21 17:38:58 crc kubenswrapper[4799]: I0121 17:38:58.833939 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" Jan 21 17:38:58 crc kubenswrapper[4799]: I0121 17:38:58.852842 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-558b484bbd-n8296" podStartSLOduration=3.852795714 podStartE2EDuration="3.852795714s" podCreationTimestamp="2026-01-21 17:38:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:38:57.855165823 +0000 UTC m=+364.481455856" watchObservedRunningTime="2026-01-21 17:38:58.852795714 +0000 UTC m=+365.479085737" Jan 21 17:39:25 crc 
Jan 21 17:39:25 crc kubenswrapper[4799]: I0121 17:39:25.971948 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:39:35 crc kubenswrapper[4799]: I0121 17:39:35.632353 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj"] Jan 21 17:39:35 crc kubenswrapper[4799]: I0121 17:39:35.633024 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" podUID="857d4f73-fe31-40a0-9c01-79aa1c103b23" containerName="route-controller-manager" containerID="cri-o://8b9f373f0eb4d712ddcd115818000626281c065437118238894465f58d15cb8f" gracePeriod=30 Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.083215 4799 generic.go:334] "Generic (PLEG): container finished" podID="857d4f73-fe31-40a0-9c01-79aa1c103b23" containerID="8b9f373f0eb4d712ddcd115818000626281c065437118238894465f58d15cb8f" exitCode=0 Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.083310 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" event={"ID":"857d4f73-fe31-40a0-9c01-79aa1c103b23","Type":"ContainerDied","Data":"8b9f373f0eb4d712ddcd115818000626281c065437118238894465f58d15cb8f"} Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.083665 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" event={"ID":"857d4f73-fe31-40a0-9c01-79aa1c103b23","Type":"ContainerDied","Data":"bbd571dfec50da1cf77d69701a2808f6eb88a388b56e12db2eb1fc0d116a9c27"} Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.083699 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bbd571dfec50da1cf77d69701a2808f6eb88a388b56e12db2eb1fc0d116a9c27" Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.086901 4799 util.go:48] "No ready sandbox for pod can be found. 
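Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj"

The DELETE -> "Killing container with a grace period" (gracePeriod=30) -> ContainerDied exitCode=0 sequence above is ordinary graceful termination: the container receives SIGTERM and a 30-second window to exit cleanly before a forced kill. The kubelet drives this through the CRI StopContainer call; the following is only a process-level sketch of the same escalation, using the standard library and a sleep process as the stand-in workload:

// gracefulkill.go — SIGTERM, wait up to the grace period, then SIGKILL.
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// stopWithGracePeriod asks the process to exit, waits up to grace for it to
// do so, and only escalates to a forced kill if the window expires.
func stopWithGracePeriod(cmd *exec.Cmd, grace time.Duration) error {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	// Ask nicely first, as the kubelet does at the start of pod deletion.
	if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
		return err
	}
	select {
	case err := <-done:
		fmt.Println("exited within grace period:", err)
		return nil
	case <-time.After(grace):
		// Grace period (30s in the log) expired: force-kill.
		fmt.Println("grace period expired, sending SIGKILL")
		return cmd.Process.Kill()
	}
}

func main() {
	cmd := exec.Command("sleep", "300")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	if err := stopWithGracePeriod(cmd, 30*time.Second); err != nil {
		panic(err)
	}
}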
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.165524 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wr449\" (UniqueName: \"kubernetes.io/projected/857d4f73-fe31-40a0-9c01-79aa1c103b23-kube-api-access-wr449\") pod \"857d4f73-fe31-40a0-9c01-79aa1c103b23\" (UID: \"857d4f73-fe31-40a0-9c01-79aa1c103b23\") " Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.165591 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/857d4f73-fe31-40a0-9c01-79aa1c103b23-serving-cert\") pod \"857d4f73-fe31-40a0-9c01-79aa1c103b23\" (UID: \"857d4f73-fe31-40a0-9c01-79aa1c103b23\") " Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.165650 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/857d4f73-fe31-40a0-9c01-79aa1c103b23-client-ca\") pod \"857d4f73-fe31-40a0-9c01-79aa1c103b23\" (UID: \"857d4f73-fe31-40a0-9c01-79aa1c103b23\") " Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.165752 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/857d4f73-fe31-40a0-9c01-79aa1c103b23-config\") pod \"857d4f73-fe31-40a0-9c01-79aa1c103b23\" (UID: \"857d4f73-fe31-40a0-9c01-79aa1c103b23\") " Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.166549 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/857d4f73-fe31-40a0-9c01-79aa1c103b23-client-ca" (OuterVolumeSpecName: "client-ca") pod "857d4f73-fe31-40a0-9c01-79aa1c103b23" (UID: "857d4f73-fe31-40a0-9c01-79aa1c103b23"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.166706 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/857d4f73-fe31-40a0-9c01-79aa1c103b23-config" (OuterVolumeSpecName: "config") pod "857d4f73-fe31-40a0-9c01-79aa1c103b23" (UID: "857d4f73-fe31-40a0-9c01-79aa1c103b23"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.177313 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857d4f73-fe31-40a0-9c01-79aa1c103b23-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "857d4f73-fe31-40a0-9c01-79aa1c103b23" (UID: "857d4f73-fe31-40a0-9c01-79aa1c103b23"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.183703 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/857d4f73-fe31-40a0-9c01-79aa1c103b23-kube-api-access-wr449" (OuterVolumeSpecName: "kube-api-access-wr449") pod "857d4f73-fe31-40a0-9c01-79aa1c103b23" (UID: "857d4f73-fe31-40a0-9c01-79aa1c103b23"). InnerVolumeSpecName "kube-api-access-wr449". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.269438 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/857d4f73-fe31-40a0-9c01-79aa1c103b23-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.269507 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wr449\" (UniqueName: \"kubernetes.io/projected/857d4f73-fe31-40a0-9c01-79aa1c103b23-kube-api-access-wr449\") on node \"crc\" DevicePath \"\"" Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.269529 4799 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/857d4f73-fe31-40a0-9c01-79aa1c103b23-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.269544 4799 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/857d4f73-fe31-40a0-9c01-79aa1c103b23-client-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.898744 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8"] Jan 21 17:39:36 crc kubenswrapper[4799]: E0121 17:39:36.899059 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="857d4f73-fe31-40a0-9c01-79aa1c103b23" containerName="route-controller-manager" Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.899075 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="857d4f73-fe31-40a0-9c01-79aa1c103b23" containerName="route-controller-manager" Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.899234 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="857d4f73-fe31-40a0-9c01-79aa1c103b23" containerName="route-controller-manager" Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.899684 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" Jan 21 17:39:36 crc kubenswrapper[4799]: I0121 17:39:36.913423 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8"] Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 17:39:37.021659 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e88f2c2-5ec7-4968-ab30-2efbc5ed6675-serving-cert\") pod \"route-controller-manager-cf5456f7f-jbsq8\" (UID: \"0e88f2c2-5ec7-4968-ab30-2efbc5ed6675\") " pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 17:39:37.021733 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e88f2c2-5ec7-4968-ab30-2efbc5ed6675-config\") pod \"route-controller-manager-cf5456f7f-jbsq8\" (UID: \"0e88f2c2-5ec7-4968-ab30-2efbc5ed6675\") " pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 17:39:37.021988 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ls2fs\" (UniqueName: \"kubernetes.io/projected/0e88f2c2-5ec7-4968-ab30-2efbc5ed6675-kube-api-access-ls2fs\") pod \"route-controller-manager-cf5456f7f-jbsq8\" (UID: \"0e88f2c2-5ec7-4968-ab30-2efbc5ed6675\") " pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 17:39:37.022183 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0e88f2c2-5ec7-4968-ab30-2efbc5ed6675-client-ca\") pod \"route-controller-manager-cf5456f7f-jbsq8\" (UID: \"0e88f2c2-5ec7-4968-ab30-2efbc5ed6675\") " pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 17:39:37.089782 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj" Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 17:39:37.110454 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj"] Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 17:39:37.117824 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5cb5d66878-269gj"] Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 17:39:37.124007 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0e88f2c2-5ec7-4968-ab30-2efbc5ed6675-client-ca\") pod \"route-controller-manager-cf5456f7f-jbsq8\" (UID: \"0e88f2c2-5ec7-4968-ab30-2efbc5ed6675\") " pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 17:39:37.124082 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e88f2c2-5ec7-4968-ab30-2efbc5ed6675-serving-cert\") pod \"route-controller-manager-cf5456f7f-jbsq8\" (UID: \"0e88f2c2-5ec7-4968-ab30-2efbc5ed6675\") " pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 17:39:37.124117 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e88f2c2-5ec7-4968-ab30-2efbc5ed6675-config\") pod \"route-controller-manager-cf5456f7f-jbsq8\" (UID: \"0e88f2c2-5ec7-4968-ab30-2efbc5ed6675\") " pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 17:39:37.124186 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ls2fs\" (UniqueName: \"kubernetes.io/projected/0e88f2c2-5ec7-4968-ab30-2efbc5ed6675-kube-api-access-ls2fs\") pod \"route-controller-manager-cf5456f7f-jbsq8\" (UID: \"0e88f2c2-5ec7-4968-ab30-2efbc5ed6675\") " pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 17:39:37.125304 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0e88f2c2-5ec7-4968-ab30-2efbc5ed6675-client-ca\") pod \"route-controller-manager-cf5456f7f-jbsq8\" (UID: \"0e88f2c2-5ec7-4968-ab30-2efbc5ed6675\") " pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 17:39:37.125624 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e88f2c2-5ec7-4968-ab30-2efbc5ed6675-config\") pod \"route-controller-manager-cf5456f7f-jbsq8\" (UID: \"0e88f2c2-5ec7-4968-ab30-2efbc5ed6675\") " pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 17:39:37.129283 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e88f2c2-5ec7-4968-ab30-2efbc5ed6675-serving-cert\") pod \"route-controller-manager-cf5456f7f-jbsq8\" (UID: \"0e88f2c2-5ec7-4968-ab30-2efbc5ed6675\") " pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 
17:39:37.152993 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ls2fs\" (UniqueName: \"kubernetes.io/projected/0e88f2c2-5ec7-4968-ab30-2efbc5ed6675-kube-api-access-ls2fs\") pod \"route-controller-manager-cf5456f7f-jbsq8\" (UID: \"0e88f2c2-5ec7-4968-ab30-2efbc5ed6675\") " pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 17:39:37.215438 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" Jan 21 17:39:37 crc kubenswrapper[4799]: I0121 17:39:37.618053 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8"] Jan 21 17:39:38 crc kubenswrapper[4799]: I0121 17:39:38.098310 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" event={"ID":"0e88f2c2-5ec7-4968-ab30-2efbc5ed6675","Type":"ContainerStarted","Data":"6ca70f51f8bd159010aa0d6992ae871946ee2211d91176061be5a2e9f2843228"} Jan 21 17:39:38 crc kubenswrapper[4799]: I0121 17:39:38.100228 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" Jan 21 17:39:38 crc kubenswrapper[4799]: I0121 17:39:38.100353 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" event={"ID":"0e88f2c2-5ec7-4968-ab30-2efbc5ed6675","Type":"ContainerStarted","Data":"37a4d277a1ce62c22586cabcfa5c72ca1c9e66c8458d368111dfd3f4689d23e0"} Jan 21 17:39:38 crc kubenswrapper[4799]: I0121 17:39:38.213507 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="857d4f73-fe31-40a0-9c01-79aa1c103b23" path="/var/lib/kubelet/pods/857d4f73-fe31-40a0-9c01-79aa1c103b23/volumes" Jan 21 17:39:38 crc kubenswrapper[4799]: I0121 17:39:38.302664 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" Jan 21 17:39:38 crc kubenswrapper[4799]: I0121 17:39:38.327816 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-cf5456f7f-jbsq8" podStartSLOduration=3.327789656 podStartE2EDuration="3.327789656s" podCreationTimestamp="2026-01-21 17:39:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:39:38.133875219 +0000 UTC m=+404.760165232" watchObservedRunningTime="2026-01-21 17:39:38.327789656 +0000 UTC m=+404.954079679" Jan 21 17:39:41 crc kubenswrapper[4799]: I0121 17:39:41.815949 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-2trx2"] Jan 21 17:39:41 crc kubenswrapper[4799]: I0121 17:39:41.817030 4799 util.go:30] "No sandbox for pod can be found. 
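
The pod_startup_latency_tracker entries above carry two clocks: wall-clock timestamps, and an m=+<seconds> suffix giving the monotonic reading since the kubelet process started (useful because wall time can jump). With both pull timestamps at the zero value 0001-01-01, meaning no image pull was needed, podStartSLOduration equals podStartE2EDuration, which is simply the observed running time minus podCreationTimestamp. A small sketch of that subtraction, assuming the timestamp layout used in the log (Go's time.Parse accepts the fractional seconds even though the layout omits them):

package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05 -0700 MST"
	created, _ := time.Parse(layout, "2026-01-21 17:39:35 +0000 UTC")
	observed, _ := time.Parse(layout, "2026-01-21 17:39:38.327789656 +0000 UTC")
	// Prints 3.327789656s, matching podStartE2EDuration above.
	fmt.Println(observed.Sub(created))
}
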
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:41 crc kubenswrapper[4799]: I0121 17:39:41.842209 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-2trx2"] Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.010256 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-registry-certificates\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.010664 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-trusted-ca\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.010701 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-bound-sa-token\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.010745 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4wfc\" (UniqueName: \"kubernetes.io/projected/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-kube-api-access-j4wfc\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.010874 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.010993 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.011080 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-registry-tls\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.011194 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.039479 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.113046 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-trusted-ca\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.113151 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-bound-sa-token\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.113217 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4wfc\" (UniqueName: \"kubernetes.io/projected/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-kube-api-access-j4wfc\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.113257 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.113325 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-registry-tls\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.113382 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.113438 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-registry-certificates\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.114262 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.115421 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-trusted-ca\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.117513 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-registry-certificates\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.131059 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-registry-tls\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.131048 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.135869 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4wfc\" (UniqueName: \"kubernetes.io/projected/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-kube-api-access-j4wfc\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.137976 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a8c07ac1-fb1b-42ad-808e-29ba2259f8d6-bound-sa-token\") pod \"image-registry-66df7c8f76-2trx2\" (UID: \"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6\") " pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.141765 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:42 crc kubenswrapper[4799]: I0121 17:39:42.562757 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-2trx2"] Jan 21 17:39:42 crc kubenswrapper[4799]: W0121 17:39:42.573649 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda8c07ac1_fb1b_42ad_808e_29ba2259f8d6.slice/crio-8254ea542514c4d000f34dbd5bde70caac4a133ecb43d262e25e9fd27009721b WatchSource:0}: Error finding container 8254ea542514c4d000f34dbd5bde70caac4a133ecb43d262e25e9fd27009721b: Status 404 returned error can't find the container with id 8254ea542514c4d000f34dbd5bde70caac4a133ecb43d262e25e9fd27009721b Jan 21 17:39:43 crc kubenswrapper[4799]: I0121 17:39:43.144918 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" event={"ID":"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6","Type":"ContainerStarted","Data":"af06aadda10822d39132848e9e5b007d5858e4e4eca524aec0d9dc8771647e5c"} Jan 21 17:39:43 crc kubenswrapper[4799]: I0121 17:39:43.144999 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" event={"ID":"a8c07ac1-fb1b-42ad-808e-29ba2259f8d6","Type":"ContainerStarted","Data":"8254ea542514c4d000f34dbd5bde70caac4a133ecb43d262e25e9fd27009721b"} Jan 21 17:39:43 crc kubenswrapper[4799]: I0121 17:39:43.171606 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" podStartSLOduration=2.171580123 podStartE2EDuration="2.171580123s" podCreationTimestamp="2026-01-21 17:39:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:39:43.165856967 +0000 UTC m=+409.792147010" watchObservedRunningTime="2026-01-21 17:39:43.171580123 +0000 UTC m=+409.797870166" Jan 21 17:39:44 crc kubenswrapper[4799]: I0121 17:39:44.152936 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:39:55 crc kubenswrapper[4799]: I0121 17:39:55.970784 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:39:55 crc kubenswrapper[4799]: I0121 17:39:55.971735 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:40:02 crc kubenswrapper[4799]: I0121 17:40:02.149011 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-2trx2" Jan 21 17:40:02 crc kubenswrapper[4799]: I0121 17:40:02.244116 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wqt42"] Jan 21 17:40:25 crc kubenswrapper[4799]: I0121 17:40:25.971663 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:40:25 crc kubenswrapper[4799]: I0121 17:40:25.972118 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:40:25 crc kubenswrapper[4799]: I0121 17:40:25.972242 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 17:40:25 crc kubenswrapper[4799]: I0121 17:40:25.973254 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b6462558f849eafd1973a1b2319347dad0de9388ecab61e98f6ea685f2b55daa"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 17:40:25 crc kubenswrapper[4799]: I0121 17:40:25.973355 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://b6462558f849eafd1973a1b2319347dad0de9388ecab61e98f6ea685f2b55daa" gracePeriod=600 Jan 21 17:40:26 crc kubenswrapper[4799]: I0121 17:40:26.413506 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="b6462558f849eafd1973a1b2319347dad0de9388ecab61e98f6ea685f2b55daa" exitCode=0 Jan 21 17:40:26 crc kubenswrapper[4799]: I0121 17:40:26.413594 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"b6462558f849eafd1973a1b2319347dad0de9388ecab61e98f6ea685f2b55daa"} Jan 21 17:40:26 crc kubenswrapper[4799]: I0121 17:40:26.413978 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"cf754122da61833aa1525f3575372a725cd96a25aa66c0876c3c4f82026fd7ab"} Jan 21 17:40:26 crc kubenswrapper[4799]: I0121 17:40:26.414007 4799 scope.go:117] "RemoveContainer" containerID="7d35f81e9de188fee4aae8d0185f93aeb74cde7b5b23e1c093726ce0d4dafe31" Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.290053 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" podUID="d3d55c34-d00c-4bc2-81e9-f65f2201a5c3" containerName="registry" containerID="cri-o://179884f2edbcc76d15f1cbd068c7e2ba36779347094045510cf75e7bb6226e86" gracePeriod=30 Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.422881 4799 generic.go:334] "Generic (PLEG): container finished" podID="d3d55c34-d00c-4bc2-81e9-f65f2201a5c3" containerID="179884f2edbcc76d15f1cbd068c7e2ba36779347094045510cf75e7bb6226e86" exitCode=0 Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.422997 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" 
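
Note the cadence of the machine-config-daemon entries above: the same liveness probe fails at 17:39:25, 17:39:55 and 17:40:25, once per 30-second period, and only the third failure produces "failed liveness probe, will be restarted". That is consistent with a consecutive-failure threshold (Kubernetes' failureThreshold, which defaults to 3). A sketch of that bookkeeping, with invented names:

package main

import "fmt"

type probeWorker struct {
	failures  int
	threshold int
}

// observe records one probe result; a success resets the streak, and
// only threshold consecutive failures trigger a restart.
func (w *probeWorker) observe(probeOK bool) (restart bool) {
	if probeOK {
		w.failures = 0
		return false
	}
	w.failures++
	return w.failures >= w.threshold
}

func main() {
	w := &probeWorker{threshold: 3}
	for i, ok := range []bool{false, false, false} {
		if w.observe(ok) {
			fmt.Printf("failure %d: failed liveness probe, will be restarted\n", i+1)
		} else {
			fmt.Printf("failure %d: below threshold, keep waiting\n", i+1)
		}
	}
}
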
event={"ID":"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3","Type":"ContainerDied","Data":"179884f2edbcc76d15f1cbd068c7e2ba36779347094045510cf75e7bb6226e86"} Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.794550 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.966285 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-ca-trust-extracted\") pod \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.966349 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-bound-sa-token\") pod \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.966424 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-registry-certificates\") pod \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.966563 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.966602 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzp92\" (UniqueName: \"kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-kube-api-access-vzp92\") pod \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.966652 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-installation-pull-secrets\") pod \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.966704 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-registry-tls\") pod \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.966753 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-trusted-ca\") pod \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\" (UID: \"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3\") " Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.967976 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3" (UID: 
"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.968653 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.974864 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-kube-api-access-vzp92" (OuterVolumeSpecName: "kube-api-access-vzp92") pod "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3"). InnerVolumeSpecName "kube-api-access-vzp92". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.975234 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.975735 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.976634 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.978878 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 21 17:40:27 crc kubenswrapper[4799]: I0121 17:40:27.985315 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3" (UID: "d3d55c34-d00c-4bc2-81e9-f65f2201a5c3"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:40:28 crc kubenswrapper[4799]: I0121 17:40:28.068391 4799 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 21 17:40:28 crc kubenswrapper[4799]: I0121 17:40:28.068439 4799 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 21 17:40:28 crc kubenswrapper[4799]: I0121 17:40:28.068460 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzp92\" (UniqueName: \"kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-kube-api-access-vzp92\") on node \"crc\" DevicePath \"\"" Jan 21 17:40:28 crc kubenswrapper[4799]: I0121 17:40:28.068471 4799 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 21 17:40:28 crc kubenswrapper[4799]: I0121 17:40:28.068481 4799 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:40:28 crc kubenswrapper[4799]: I0121 17:40:28.068489 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:40:28 crc kubenswrapper[4799]: I0121 17:40:28.068498 4799 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 21 17:40:28 crc kubenswrapper[4799]: I0121 17:40:28.438391 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" event={"ID":"d3d55c34-d00c-4bc2-81e9-f65f2201a5c3","Type":"ContainerDied","Data":"459bb4056a2d8d8049ece69f7e13d569da9a9dc399189618f5ad4d910cde5109"} Jan 21 17:40:28 crc kubenswrapper[4799]: I0121 17:40:28.438821 4799 scope.go:117] "RemoveContainer" containerID="179884f2edbcc76d15f1cbd068c7e2ba36779347094045510cf75e7bb6226e86" Jan 21 17:40:28 crc kubenswrapper[4799]: I0121 17:40:28.438462 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-wqt42" Jan 21 17:40:28 crc kubenswrapper[4799]: I0121 17:40:28.467967 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wqt42"] Jan 21 17:40:28 crc kubenswrapper[4799]: I0121 17:40:28.472197 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wqt42"] Jan 21 17:40:30 crc kubenswrapper[4799]: I0121 17:40:30.213664 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3d55c34-d00c-4bc2-81e9-f65f2201a5c3" path="/var/lib/kubelet/pods/d3d55c34-d00c-4bc2-81e9-f65f2201a5c3/volumes" Jan 21 17:42:55 crc kubenswrapper[4799]: I0121 17:42:55.971124 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:42:55 crc kubenswrapper[4799]: I0121 17:42:55.971851 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.522315 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-pmjm6"] Jan 21 17:42:56 crc kubenswrapper[4799]: E0121 17:42:56.522812 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3d55c34-d00c-4bc2-81e9-f65f2201a5c3" containerName="registry" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.522859 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3d55c34-d00c-4bc2-81e9-f65f2201a5c3" containerName="registry" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.523048 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3d55c34-d00c-4bc2-81e9-f65f2201a5c3" containerName="registry" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.523656 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-pmjm6" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.524735 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-858654f9db-ct8cl"] Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.525575 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-858654f9db-ct8cl" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.528468 4799 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-6dztq" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.528525 4799 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-c6ph5" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.528756 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.528977 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.566553 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-xt8bd"] Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.571898 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-xt8bd" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.574601 4799 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-nml2n" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.578632 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-ct8cl"] Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.592922 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-xt8bd"] Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.600651 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-pmjm6"] Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.620538 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2fqf\" (UniqueName: \"kubernetes.io/projected/b64d5a15-e3a7-45be-a22f-730946419bd4-kube-api-access-m2fqf\") pod \"cert-manager-cainjector-cf98fcc89-pmjm6\" (UID: \"b64d5a15-e3a7-45be-a22f-730946419bd4\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-pmjm6" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.722103 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2fqf\" (UniqueName: \"kubernetes.io/projected/b64d5a15-e3a7-45be-a22f-730946419bd4-kube-api-access-m2fqf\") pod \"cert-manager-cainjector-cf98fcc89-pmjm6\" (UID: \"b64d5a15-e3a7-45be-a22f-730946419bd4\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-pmjm6" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.722520 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n49hd\" (UniqueName: \"kubernetes.io/projected/de26c870-5c19-414b-9222-c0cd1419550d-kube-api-access-n49hd\") pod \"cert-manager-webhook-687f57d79b-xt8bd\" (UID: \"de26c870-5c19-414b-9222-c0cd1419550d\") " pod="cert-manager/cert-manager-webhook-687f57d79b-xt8bd" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.722983 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zlf6\" (UniqueName: \"kubernetes.io/projected/48e93168-c733-4355-b1b2-5cfd895ed094-kube-api-access-8zlf6\") pod \"cert-manager-858654f9db-ct8cl\" (UID: \"48e93168-c733-4355-b1b2-5cfd895ed094\") " 
pod="cert-manager/cert-manager-858654f9db-ct8cl" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.748528 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2fqf\" (UniqueName: \"kubernetes.io/projected/b64d5a15-e3a7-45be-a22f-730946419bd4-kube-api-access-m2fqf\") pod \"cert-manager-cainjector-cf98fcc89-pmjm6\" (UID: \"b64d5a15-e3a7-45be-a22f-730946419bd4\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-pmjm6" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.825369 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zlf6\" (UniqueName: \"kubernetes.io/projected/48e93168-c733-4355-b1b2-5cfd895ed094-kube-api-access-8zlf6\") pod \"cert-manager-858654f9db-ct8cl\" (UID: \"48e93168-c733-4355-b1b2-5cfd895ed094\") " pod="cert-manager/cert-manager-858654f9db-ct8cl" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.825984 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n49hd\" (UniqueName: \"kubernetes.io/projected/de26c870-5c19-414b-9222-c0cd1419550d-kube-api-access-n49hd\") pod \"cert-manager-webhook-687f57d79b-xt8bd\" (UID: \"de26c870-5c19-414b-9222-c0cd1419550d\") " pod="cert-manager/cert-manager-webhook-687f57d79b-xt8bd" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.845270 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n49hd\" (UniqueName: \"kubernetes.io/projected/de26c870-5c19-414b-9222-c0cd1419550d-kube-api-access-n49hd\") pod \"cert-manager-webhook-687f57d79b-xt8bd\" (UID: \"de26c870-5c19-414b-9222-c0cd1419550d\") " pod="cert-manager/cert-manager-webhook-687f57d79b-xt8bd" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.845468 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-pmjm6" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.845822 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zlf6\" (UniqueName: \"kubernetes.io/projected/48e93168-c733-4355-b1b2-5cfd895ed094-kube-api-access-8zlf6\") pod \"cert-manager-858654f9db-ct8cl\" (UID: \"48e93168-c733-4355-b1b2-5cfd895ed094\") " pod="cert-manager/cert-manager-858654f9db-ct8cl" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.884956 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-ct8cl" Jan 21 17:42:56 crc kubenswrapper[4799]: I0121 17:42:56.893267 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-xt8bd" Jan 21 17:42:57 crc kubenswrapper[4799]: I0121 17:42:57.124109 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-pmjm6"] Jan 21 17:42:57 crc kubenswrapper[4799]: I0121 17:42:57.141435 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 17:42:57 crc kubenswrapper[4799]: I0121 17:42:57.164182 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-ct8cl"] Jan 21 17:42:57 crc kubenswrapper[4799]: W0121 17:42:57.172191 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod48e93168_c733_4355_b1b2_5cfd895ed094.slice/crio-bc5292151db39df278c9afa31b051d529783aea6f060861b726d3fd70c6e112d WatchSource:0}: Error finding container bc5292151db39df278c9afa31b051d529783aea6f060861b726d3fd70c6e112d: Status 404 returned error can't find the container with id bc5292151db39df278c9afa31b051d529783aea6f060861b726d3fd70c6e112d Jan 21 17:42:57 crc kubenswrapper[4799]: I0121 17:42:57.461978 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-xt8bd"] Jan 21 17:42:57 crc kubenswrapper[4799]: W0121 17:42:57.468242 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde26c870_5c19_414b_9222_c0cd1419550d.slice/crio-b52f505f1e3d4efa86decc6f61b24ce8ed5cba2ccc343eafb6b2eec87ccb5956 WatchSource:0}: Error finding container b52f505f1e3d4efa86decc6f61b24ce8ed5cba2ccc343eafb6b2eec87ccb5956: Status 404 returned error can't find the container with id b52f505f1e3d4efa86decc6f61b24ce8ed5cba2ccc343eafb6b2eec87ccb5956 Jan 21 17:42:57 crc kubenswrapper[4799]: I0121 17:42:57.528810 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-xt8bd" event={"ID":"de26c870-5c19-414b-9222-c0cd1419550d","Type":"ContainerStarted","Data":"b52f505f1e3d4efa86decc6f61b24ce8ed5cba2ccc343eafb6b2eec87ccb5956"} Jan 21 17:42:57 crc kubenswrapper[4799]: I0121 17:42:57.530352 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-pmjm6" event={"ID":"b64d5a15-e3a7-45be-a22f-730946419bd4","Type":"ContainerStarted","Data":"68b6efaf2277621b237ff52e9382ff4a2307d99f76ebe010165fe3649b06f516"} Jan 21 17:42:57 crc kubenswrapper[4799]: I0121 17:42:57.531644 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-ct8cl" event={"ID":"48e93168-c733-4355-b1b2-5cfd895ed094","Type":"ContainerStarted","Data":"bc5292151db39df278c9afa31b051d529783aea6f060861b726d3fd70c6e112d"} Jan 21 17:43:00 crc kubenswrapper[4799]: I0121 17:43:00.552860 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-pmjm6" event={"ID":"b64d5a15-e3a7-45be-a22f-730946419bd4","Type":"ContainerStarted","Data":"b3a99198fe991f4dd89549a51b703e374058165914b08efd483f8618645cb1b2"} Jan 21 17:43:00 crc kubenswrapper[4799]: I0121 17:43:00.572109 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-cf98fcc89-pmjm6" podStartSLOduration=1.987527198 podStartE2EDuration="4.572062756s" podCreationTimestamp="2026-01-21 17:42:56 +0000 UTC" firstStartedPulling="2026-01-21 17:42:57.141101567 +0000 UTC m=+603.767391590" 
lastFinishedPulling="2026-01-21 17:42:59.725637125 +0000 UTC m=+606.351927148" observedRunningTime="2026-01-21 17:43:00.567046892 +0000 UTC m=+607.193336925" watchObservedRunningTime="2026-01-21 17:43:00.572062756 +0000 UTC m=+607.198352779" Jan 21 17:43:02 crc kubenswrapper[4799]: I0121 17:43:02.626852 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-ct8cl" event={"ID":"48e93168-c733-4355-b1b2-5cfd895ed094","Type":"ContainerStarted","Data":"cb242707727419771500362268ce4c03c3bcee372f1bfec60c7ce0d980d8626c"} Jan 21 17:43:02 crc kubenswrapper[4799]: I0121 17:43:02.633641 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-xt8bd" event={"ID":"de26c870-5c19-414b-9222-c0cd1419550d","Type":"ContainerStarted","Data":"787c908495735cf799964e02b00a7f62b623ba8750d503af1ceace9680332a67"} Jan 21 17:43:02 crc kubenswrapper[4799]: I0121 17:43:02.633855 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-687f57d79b-xt8bd" Jan 21 17:43:02 crc kubenswrapper[4799]: I0121 17:43:02.653665 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-858654f9db-ct8cl" podStartSLOduration=2.285996905 podStartE2EDuration="6.653623072s" podCreationTimestamp="2026-01-21 17:42:56 +0000 UTC" firstStartedPulling="2026-01-21 17:42:57.176020088 +0000 UTC m=+603.802310111" lastFinishedPulling="2026-01-21 17:43:01.543646255 +0000 UTC m=+608.169936278" observedRunningTime="2026-01-21 17:43:02.650246485 +0000 UTC m=+609.276536528" watchObservedRunningTime="2026-01-21 17:43:02.653623072 +0000 UTC m=+609.279913085" Jan 21 17:43:02 crc kubenswrapper[4799]: I0121 17:43:02.684450 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-687f57d79b-xt8bd" podStartSLOduration=2.651283029 podStartE2EDuration="6.684420515s" podCreationTimestamp="2026-01-21 17:42:56 +0000 UTC" firstStartedPulling="2026-01-21 17:42:57.47496811 +0000 UTC m=+604.101258133" lastFinishedPulling="2026-01-21 17:43:01.508105596 +0000 UTC m=+608.134395619" observedRunningTime="2026-01-21 17:43:02.670814835 +0000 UTC m=+609.297104858" watchObservedRunningTime="2026-01-21 17:43:02.684420515 +0000 UTC m=+609.310710548" Jan 21 17:43:05 crc kubenswrapper[4799]: I0121 17:43:05.844156 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6qqjg"] Jan 21 17:43:05 crc kubenswrapper[4799]: I0121 17:43:05.845204 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovn-controller" containerID="cri-o://3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7" gracePeriod=30 Jan 21 17:43:05 crc kubenswrapper[4799]: I0121 17:43:05.845291 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="northd" containerID="cri-o://f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6" gracePeriod=30 Jan 21 17:43:05 crc kubenswrapper[4799]: I0121 17:43:05.845312 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="sbdb" 
containerID="cri-o://e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365" gracePeriod=30 Jan 21 17:43:05 crc kubenswrapper[4799]: I0121 17:43:05.845246 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448" gracePeriod=30 Jan 21 17:43:05 crc kubenswrapper[4799]: I0121 17:43:05.845405 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovn-acl-logging" containerID="cri-o://f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0" gracePeriod=30 Jan 21 17:43:05 crc kubenswrapper[4799]: I0121 17:43:05.845445 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="kube-rbac-proxy-node" containerID="cri-o://9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef" gracePeriod=30 Jan 21 17:43:05 crc kubenswrapper[4799]: I0121 17:43:05.845498 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="nbdb" containerID="cri-o://ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e" gracePeriod=30 Jan 21 17:43:05 crc kubenswrapper[4799]: I0121 17:43:05.916812 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovnkube-controller" containerID="cri-o://417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac" gracePeriod=30 Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.023633 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e is running failed: container process not found" containerID="ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.024249 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e is running failed: container process not found" containerID="ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.026387 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e is running failed: container process not found" containerID="ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e" cmd=["/bin/bash","-c","set -xeo pipefail\n. 
/ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.026524 4799 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e is running failed: container process not found" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="nbdb" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.026430 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.028470 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.030380 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.030438 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="sbdb" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.150648 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovnkube-controller/3.log" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.153064 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovn-acl-logging/0.log" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.153697 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovn-controller/0.log" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.154302 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.220676 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-s58mz"] Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.221038 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="kubecfg-setup" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221080 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="kubecfg-setup" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.221099 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="kube-rbac-proxy-node" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221108 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="kube-rbac-proxy-node" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.221149 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovnkube-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221158 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovnkube-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.221170 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovn-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221178 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovn-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.221191 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="sbdb" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221201 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="sbdb" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.221214 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovnkube-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221223 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovnkube-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.221236 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="northd" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221244 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="northd" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.221256 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="nbdb" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221264 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="nbdb" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.221273 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6770819e-2fef-4203-9c5f-504628af7b66" 
containerName="ovnkube-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221280 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovnkube-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.221292 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovnkube-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221300 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovnkube-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.221317 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovn-acl-logging" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221326 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovn-acl-logging" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.221338 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="kube-rbac-proxy-ovn-metrics" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221348 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="kube-rbac-proxy-ovn-metrics" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221530 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="sbdb" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221546 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovn-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221556 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovn-acl-logging" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221564 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="northd" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221578 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="nbdb" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221590 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovnkube-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221600 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="kube-rbac-proxy-ovn-metrics" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221611 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovnkube-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221621 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="kube-rbac-proxy-node" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221630 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovnkube-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221641 4799 
memory_manager.go:354] "RemoveStaleState removing state" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovnkube-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.221777 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovnkube-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221788 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovnkube-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.221946 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6770819e-2fef-4203-9c5f-504628af7b66" containerName="ovnkube-controller" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.224646 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.242533 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-ovnkube-script-lib\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.242602 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-env-overrides\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.242634 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-openvswitch\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.242662 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-ovn\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.242710 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-etc-openvswitch\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.242809 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.242819 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.242878 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.243277 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-systemd-units\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.243303 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-kubelet\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.243322 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-run-netns\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.243344 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-ovnkube-config\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.243352 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.243398 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.243388 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8s9k\" (UniqueName: \"kubernetes.io/projected/6770819e-2fef-4203-9c5f-504628af7b66-kube-api-access-p8s9k\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.243488 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6770819e-2fef-4203-9c5f-504628af7b66-ovn-node-metrics-cert\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.243529 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-slash\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.243426 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.243490 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.243781 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-systemd\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.243814 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.243830 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-slash" (OuterVolumeSpecName: "host-slash") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.243995 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.244260 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-run-ovn-kubernetes\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.244369 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-var-lib-cni-networks-ovn-kubernetes\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.244412 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-cni-bin\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.244454 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-var-lib-openvswitch\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.244475 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-cni-netd\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.244545 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-node-log\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.244609 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-log-socket\") pod \"6770819e-2fef-4203-9c5f-504628af7b66\" (UID: \"6770819e-2fef-4203-9c5f-504628af7b66\") " Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.244796 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.244817 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-run-systemd\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.244890 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.244888 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-run-netns\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.244936 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-var-lib-openvswitch\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.244956 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-run-openvswitch\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.244971 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.244987 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-node-log\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.245034 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.245083 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-systemd-units\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.245080 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.245250 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-node-log" (OuterVolumeSpecName: "node-log") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.245285 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-log-socket" (OuterVolumeSpecName: "log-socket") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.245467 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.245596 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-run-ovn-kubernetes\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.245934 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-slash\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.245970 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghf98\" (UniqueName: \"kubernetes.io/projected/f60e6624-ebc7-4a4c-9751-8329c56bcb14-kube-api-access-ghf98\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.246036 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-cni-bin\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.246066 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-cni-netd\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.246114 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f60e6624-ebc7-4a4c-9751-8329c56bcb14-ovnkube-config\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.246189 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-etc-openvswitch\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.246212 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f60e6624-ebc7-4a4c-9751-8329c56bcb14-env-overrides\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.246252 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f60e6624-ebc7-4a4c-9751-8329c56bcb14-ovnkube-script-lib\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.246339 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-kubelet\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.246373 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-run-ovn\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.246397 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f60e6624-ebc7-4a4c-9751-8329c56bcb14-ovn-node-metrics-cert\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc 
kubenswrapper[4799]: I0121 17:43:06.247190 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-log-socket\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247285 4799 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247301 4799 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247313 4799 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247328 4799 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247342 4799 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247353 4799 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247365 4799 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247380 4799 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-slash\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247392 4799 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247404 4799 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247415 4799 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247453 4799 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247486 4799 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247496 4799 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-node-log\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247509 4799 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-log-socket\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247520 4799 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.247528 4799 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6770819e-2fef-4203-9c5f-504628af7b66-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.252836 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6770819e-2fef-4203-9c5f-504628af7b66-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.256491 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6770819e-2fef-4203-9c5f-504628af7b66-kube-api-access-p8s9k" (OuterVolumeSpecName: "kube-api-access-p8s9k") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "kube-api-access-p8s9k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.266355 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "6770819e-2fef-4203-9c5f-504628af7b66" (UID: "6770819e-2fef-4203-9c5f-504628af7b66"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.349789 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-log-socket\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.349878 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-run-systemd\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.349909 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-run-netns\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.349935 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-var-lib-openvswitch\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.349958 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-run-openvswitch\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.349986 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-node-log\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.349974 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-log-socket\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350015 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-systemd-units\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350083 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-run-openvswitch\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350138 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-run-systemd\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350154 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350196 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-run-ovn-kubernetes\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350207 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-systemd-units\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350212 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350070 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-run-netns\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350078 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-node-log\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350119 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-var-lib-openvswitch\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350290 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-slash\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350318 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-ghf98\" (UniqueName: \"kubernetes.io/projected/f60e6624-ebc7-4a4c-9751-8329c56bcb14-kube-api-access-ghf98\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350347 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-run-ovn-kubernetes\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350353 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-cni-bin\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350383 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-cni-netd\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350391 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-slash\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350404 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f60e6624-ebc7-4a4c-9751-8329c56bcb14-ovnkube-config\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350427 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-cni-bin\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350452 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-etc-openvswitch\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350471 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f60e6624-ebc7-4a4c-9751-8329c56bcb14-env-overrides\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350494 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/f60e6624-ebc7-4a4c-9751-8329c56bcb14-ovnkube-script-lib\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350536 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-kubelet\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350559 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-run-ovn\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350575 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f60e6624-ebc7-4a4c-9751-8329c56bcb14-ovn-node-metrics-cert\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350627 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8s9k\" (UniqueName: \"kubernetes.io/projected/6770819e-2fef-4203-9c5f-504628af7b66-kube-api-access-p8s9k\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350641 4799 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6770819e-2fef-4203-9c5f-504628af7b66-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.350655 4799 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6770819e-2fef-4203-9c5f-504628af7b66-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.351098 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-cni-netd\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.351246 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-etc-openvswitch\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.352184 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f60e6624-ebc7-4a4c-9751-8329c56bcb14-ovnkube-script-lib\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.352245 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-run-ovn\") pod 
\"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.352255 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f60e6624-ebc7-4a4c-9751-8329c56bcb14-host-kubelet\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.352662 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f60e6624-ebc7-4a4c-9751-8329c56bcb14-ovnkube-config\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.352994 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f60e6624-ebc7-4a4c-9751-8329c56bcb14-env-overrides\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.355868 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f60e6624-ebc7-4a4c-9751-8329c56bcb14-ovn-node-metrics-cert\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.371299 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghf98\" (UniqueName: \"kubernetes.io/projected/f60e6624-ebc7-4a4c-9751-8329c56bcb14-kube-api-access-ghf98\") pod \"ovnkube-node-s58mz\" (UID: \"f60e6624-ebc7-4a4c-9751-8329c56bcb14\") " pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.538864 4799 util.go:30] "No sandbox for pod can be found. 
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.661391 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-sl7lv_3004f2e1-bd6a-46a1-a6d9-835472f616b8/kube-multus/2.log"
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.662096 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-sl7lv_3004f2e1-bd6a-46a1-a6d9-835472f616b8/kube-multus/1.log"
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.662173 4799 generic.go:334] "Generic (PLEG): container finished" podID="3004f2e1-bd6a-46a1-a6d9-835472f616b8" containerID="6c3bc39cc19c866dd40455a982701ba69abfab4ec1850efa28878c9028541555" exitCode=2
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.662251 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-sl7lv" event={"ID":"3004f2e1-bd6a-46a1-a6d9-835472f616b8","Type":"ContainerDied","Data":"6c3bc39cc19c866dd40455a982701ba69abfab4ec1850efa28878c9028541555"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.662309 4799 scope.go:117] "RemoveContainer" containerID="cc4bd2b4d337b25c3a57212bc50968e1ce7cfe716f539f873a37c9adde85dc4f"
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.664496 4799 scope.go:117] "RemoveContainer" containerID="6c3bc39cc19c866dd40455a982701ba69abfab4ec1850efa28878c9028541555"
Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.665206 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-sl7lv_openshift-multus(3004f2e1-bd6a-46a1-a6d9-835472f616b8)\"" pod="openshift-multus/multus-sl7lv" podUID="3004f2e1-bd6a-46a1-a6d9-835472f616b8"
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.668671 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" event={"ID":"f60e6624-ebc7-4a4c-9751-8329c56bcb14","Type":"ContainerStarted","Data":"785449e72afe76f9757f2a048acc38140519d38f8d9f0b241a451e73d2339c5f"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.675798 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovnkube-controller/3.log"
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.689888 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovn-acl-logging/0.log"
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.692581 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6qqjg_6770819e-2fef-4203-9c5f-504628af7b66/ovn-controller/0.log"
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.693810 4799 generic.go:334] "Generic (PLEG): container finished" podID="6770819e-2fef-4203-9c5f-504628af7b66" containerID="417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac" exitCode=0
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.693862 4799 generic.go:334] "Generic (PLEG): container finished" podID="6770819e-2fef-4203-9c5f-504628af7b66" containerID="e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365" exitCode=0
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.693889 4799 generic.go:334] "Generic (PLEG): container finished" podID="6770819e-2fef-4203-9c5f-504628af7b66" containerID="ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e" exitCode=0
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.693899 4799 generic.go:334] "Generic (PLEG): container finished" podID="6770819e-2fef-4203-9c5f-504628af7b66" containerID="f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6" exitCode=0
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.693906 4799 generic.go:334] "Generic (PLEG): container finished" podID="6770819e-2fef-4203-9c5f-504628af7b66" containerID="9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448" exitCode=0
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.693931 4799 generic.go:334] "Generic (PLEG): container finished" podID="6770819e-2fef-4203-9c5f-504628af7b66" containerID="9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef" exitCode=0
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.693942 4799 generic.go:334] "Generic (PLEG): container finished" podID="6770819e-2fef-4203-9c5f-504628af7b66" containerID="f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0" exitCode=143
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.693971 4799 generic.go:334] "Generic (PLEG): container finished" podID="6770819e-2fef-4203-9c5f-504628af7b66" containerID="3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7" exitCode=143
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694000 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerDied","Data":"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694055 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerDied","Data":"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694071 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerDied","Data":"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694080 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerDied","Data":"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694088 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerDied","Data":"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694102 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerDied","Data":"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694150 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694162 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694203 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694211 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694216 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694221 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694226 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694231 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694238 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694243 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694252 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerDied","Data":"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694261 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694290 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694295 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365"}
Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694300 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e"}
Jan 21 17:43:06 crc
kubenswrapper[4799]: I0121 17:43:06.694305 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694313 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694318 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694323 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694328 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694333 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694340 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerDied","Data":"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694368 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694375 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694380 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694386 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694390 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694396 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694401 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef"} Jan 21 17:43:06 crc 
kubenswrapper[4799]: I0121 17:43:06.694405 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694410 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694415 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694422 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" event={"ID":"6770819e-2fef-4203-9c5f-504628af7b66","Type":"ContainerDied","Data":"e2ab094313baa4b16aae016079be135b984e61b988de75fb270b6d3572c1064f"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694451 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694457 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694462 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694467 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694472 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694476 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694481 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694537 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694545 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7"} Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.694550 4799 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c"} Jan 21 17:43:06 crc 
kubenswrapper[4799]: I0121 17:43:06.694750 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6qqjg" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.695667 4799 scope.go:117] "RemoveContainer" containerID="417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.766398 4799 scope.go:117] "RemoveContainer" containerID="4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.777351 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6qqjg"] Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.784662 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6qqjg"] Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.787531 4799 scope.go:117] "RemoveContainer" containerID="e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.810365 4799 scope.go:117] "RemoveContainer" containerID="ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.833549 4799 scope.go:117] "RemoveContainer" containerID="f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.848732 4799 scope.go:117] "RemoveContainer" containerID="9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.864765 4799 scope.go:117] "RemoveContainer" containerID="9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.880902 4799 scope.go:117] "RemoveContainer" containerID="f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.892896 4799 scope.go:117] "RemoveContainer" containerID="3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.896421 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-687f57d79b-xt8bd" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.907581 4799 scope.go:117] "RemoveContainer" containerID="90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.927676 4799 scope.go:117] "RemoveContainer" containerID="417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.928211 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac\": container with ID starting with 417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac not found: ID does not exist" containerID="417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.928253 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac"} err="failed to get container status \"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac\": rpc error: code = NotFound desc = could not find container 
\"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac\": container with ID starting with 417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.928289 4799 scope.go:117] "RemoveContainer" containerID="4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.928646 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258\": container with ID starting with 4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258 not found: ID does not exist" containerID="4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.928695 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258"} err="failed to get container status \"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258\": rpc error: code = NotFound desc = could not find container \"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258\": container with ID starting with 4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.928734 4799 scope.go:117] "RemoveContainer" containerID="e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.929097 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\": container with ID starting with e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365 not found: ID does not exist" containerID="e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.929144 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365"} err="failed to get container status \"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\": rpc error: code = NotFound desc = could not find container \"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\": container with ID starting with e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.929164 4799 scope.go:117] "RemoveContainer" containerID="ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.929507 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\": container with ID starting with ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e not found: ID does not exist" containerID="ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.929536 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e"} 
err="failed to get container status \"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\": rpc error: code = NotFound desc = could not find container \"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\": container with ID starting with ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.929555 4799 scope.go:117] "RemoveContainer" containerID="f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.929868 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\": container with ID starting with f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6 not found: ID does not exist" containerID="f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.929898 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6"} err="failed to get container status \"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\": rpc error: code = NotFound desc = could not find container \"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\": container with ID starting with f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.929974 4799 scope.go:117] "RemoveContainer" containerID="9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.930242 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\": container with ID starting with 9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448 not found: ID does not exist" containerID="9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.930264 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448"} err="failed to get container status \"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\": rpc error: code = NotFound desc = could not find container \"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\": container with ID starting with 9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.930277 4799 scope.go:117] "RemoveContainer" containerID="9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.930523 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\": container with ID starting with 9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef not found: ID does not exist" containerID="9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.930547 4799 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef"} err="failed to get container status \"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\": rpc error: code = NotFound desc = could not find container \"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\": container with ID starting with 9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.930576 4799 scope.go:117] "RemoveContainer" containerID="f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.930775 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\": container with ID starting with f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0 not found: ID does not exist" containerID="f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.930801 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0"} err="failed to get container status \"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\": rpc error: code = NotFound desc = could not find container \"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\": container with ID starting with f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.930816 4799 scope.go:117] "RemoveContainer" containerID="3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.931014 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\": container with ID starting with 3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7 not found: ID does not exist" containerID="3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.931049 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7"} err="failed to get container status \"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\": rpc error: code = NotFound desc = could not find container \"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\": container with ID starting with 3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.931068 4799 scope.go:117] "RemoveContainer" containerID="90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c" Jan 21 17:43:06 crc kubenswrapper[4799]: E0121 17:43:06.931315 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\": container with ID starting with 90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c not found: ID does 
not exist" containerID="90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.931333 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c"} err="failed to get container status \"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\": rpc error: code = NotFound desc = could not find container \"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\": container with ID starting with 90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.931351 4799 scope.go:117] "RemoveContainer" containerID="417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.931575 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac"} err="failed to get container status \"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac\": rpc error: code = NotFound desc = could not find container \"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac\": container with ID starting with 417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.931693 4799 scope.go:117] "RemoveContainer" containerID="4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.931937 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258"} err="failed to get container status \"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258\": rpc error: code = NotFound desc = could not find container \"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258\": container with ID starting with 4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.931966 4799 scope.go:117] "RemoveContainer" containerID="e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.932279 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365"} err="failed to get container status \"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\": rpc error: code = NotFound desc = could not find container \"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\": container with ID starting with e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.932302 4799 scope.go:117] "RemoveContainer" containerID="ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.932543 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e"} err="failed to get container status \"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\": rpc error: code = NotFound desc = could 
not find container \"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\": container with ID starting with ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.932564 4799 scope.go:117] "RemoveContainer" containerID="f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.932794 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6"} err="failed to get container status \"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\": rpc error: code = NotFound desc = could not find container \"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\": container with ID starting with f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.932817 4799 scope.go:117] "RemoveContainer" containerID="9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.933016 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448"} err="failed to get container status \"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\": rpc error: code = NotFound desc = could not find container \"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\": container with ID starting with 9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.933037 4799 scope.go:117] "RemoveContainer" containerID="9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.933254 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef"} err="failed to get container status \"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\": rpc error: code = NotFound desc = could not find container \"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\": container with ID starting with 9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.933272 4799 scope.go:117] "RemoveContainer" containerID="f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.933488 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0"} err="failed to get container status \"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\": rpc error: code = NotFound desc = could not find container \"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\": container with ID starting with f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.933509 4799 scope.go:117] "RemoveContainer" containerID="3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.933693 4799 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7"} err="failed to get container status \"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\": rpc error: code = NotFound desc = could not find container \"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\": container with ID starting with 3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.933722 4799 scope.go:117] "RemoveContainer" containerID="90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.933909 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c"} err="failed to get container status \"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\": rpc error: code = NotFound desc = could not find container \"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\": container with ID starting with 90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.933927 4799 scope.go:117] "RemoveContainer" containerID="417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.934112 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac"} err="failed to get container status \"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac\": rpc error: code = NotFound desc = could not find container \"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac\": container with ID starting with 417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.934150 4799 scope.go:117] "RemoveContainer" containerID="4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.934340 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258"} err="failed to get container status \"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258\": rpc error: code = NotFound desc = could not find container \"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258\": container with ID starting with 4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.934355 4799 scope.go:117] "RemoveContainer" containerID="e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.934520 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365"} err="failed to get container status \"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\": rpc error: code = NotFound desc = could not find container \"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\": container with ID starting with 
e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.934542 4799 scope.go:117] "RemoveContainer" containerID="ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.934750 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e"} err="failed to get container status \"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\": rpc error: code = NotFound desc = could not find container \"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\": container with ID starting with ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.934769 4799 scope.go:117] "RemoveContainer" containerID="f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.935020 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6"} err="failed to get container status \"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\": rpc error: code = NotFound desc = could not find container \"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\": container with ID starting with f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.935045 4799 scope.go:117] "RemoveContainer" containerID="9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.935288 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448"} err="failed to get container status \"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\": rpc error: code = NotFound desc = could not find container \"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\": container with ID starting with 9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.935313 4799 scope.go:117] "RemoveContainer" containerID="9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.935544 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef"} err="failed to get container status \"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\": rpc error: code = NotFound desc = could not find container \"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\": container with ID starting with 9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.935565 4799 scope.go:117] "RemoveContainer" containerID="f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.935808 4799 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0"} err="failed to get container status \"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\": rpc error: code = NotFound desc = could not find container \"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\": container with ID starting with f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.935828 4799 scope.go:117] "RemoveContainer" containerID="3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.936042 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7"} err="failed to get container status \"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\": rpc error: code = NotFound desc = could not find container \"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\": container with ID starting with 3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.936066 4799 scope.go:117] "RemoveContainer" containerID="90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.936320 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c"} err="failed to get container status \"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\": rpc error: code = NotFound desc = could not find container \"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\": container with ID starting with 90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.936347 4799 scope.go:117] "RemoveContainer" containerID="417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.936575 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac"} err="failed to get container status \"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac\": rpc error: code = NotFound desc = could not find container \"417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac\": container with ID starting with 417ba815c1a8d451f3ee746fbeb65de9b07b4f7ceda9b1b192288f5b8bc043ac not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.936595 4799 scope.go:117] "RemoveContainer" containerID="4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.936817 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258"} err="failed to get container status \"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258\": rpc error: code = NotFound desc = could not find container \"4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258\": container with ID starting with 4685c6b680c453e3c729932ec2c4944d5529213ee868db12108edf8222ede258 not found: ID does not exist" Jan 
21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.936840 4799 scope.go:117] "RemoveContainer" containerID="e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.937066 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365"} err="failed to get container status \"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\": rpc error: code = NotFound desc = could not find container \"e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365\": container with ID starting with e4ca0b1bbe4153067d3a7c1ffdea4f8d9d006f1e4b5c570ed05b1782f67b1365 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.937085 4799 scope.go:117] "RemoveContainer" containerID="ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.937313 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e"} err="failed to get container status \"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\": rpc error: code = NotFound desc = could not find container \"ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e\": container with ID starting with ca3cdbc2d904e769041c726ea5229700cd94206e952244bc37f14a8a342a902e not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.937334 4799 scope.go:117] "RemoveContainer" containerID="f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.937533 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6"} err="failed to get container status \"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\": rpc error: code = NotFound desc = could not find container \"f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6\": container with ID starting with f3fd69edacff4fe3c81bdc9e8d138cfd4acd1354bc9e4bb27417035db01c60a6 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.937548 4799 scope.go:117] "RemoveContainer" containerID="9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.937741 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448"} err="failed to get container status \"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\": rpc error: code = NotFound desc = could not find container \"9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448\": container with ID starting with 9517b23f1947a0dc31b172fd8dafc92470390171181d7255f5d5eb0d82f08448 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.937762 4799 scope.go:117] "RemoveContainer" containerID="9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.937989 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef"} err="failed to get container status 
\"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\": rpc error: code = NotFound desc = could not find container \"9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef\": container with ID starting with 9dcc153aa7fb1a0944ec482b16e190bbc4353f94e4914b948d05bd06872425ef not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.938011 4799 scope.go:117] "RemoveContainer" containerID="f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.938296 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0"} err="failed to get container status \"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\": rpc error: code = NotFound desc = could not find container \"f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0\": container with ID starting with f3524fe4159d4c32d2fe7cdd2a7c3fb00cf39d7504e841575664b2bd09ab0ef0 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.938313 4799 scope.go:117] "RemoveContainer" containerID="3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.938510 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7"} err="failed to get container status \"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\": rpc error: code = NotFound desc = could not find container \"3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7\": container with ID starting with 3d36ac80be598996ca41ca0a6ea262badbbde034aac3de10653bccafdf3e30d7 not found: ID does not exist" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.938532 4799 scope.go:117] "RemoveContainer" containerID="90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c" Jan 21 17:43:06 crc kubenswrapper[4799]: I0121 17:43:06.938719 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c"} err="failed to get container status \"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\": rpc error: code = NotFound desc = could not find container \"90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c\": container with ID starting with 90e724dcb5511152bafe9f7c009482c6dfce5e4fcd3b3597df9d32bd4eb8906c not found: ID does not exist" Jan 21 17:43:07 crc kubenswrapper[4799]: I0121 17:43:07.704357 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-sl7lv_3004f2e1-bd6a-46a1-a6d9-835472f616b8/kube-multus/2.log" Jan 21 17:43:07 crc kubenswrapper[4799]: I0121 17:43:07.706293 4799 generic.go:334] "Generic (PLEG): container finished" podID="f60e6624-ebc7-4a4c-9751-8329c56bcb14" containerID="dcb9b29457399e1b9f8a096ab334bab86d43128fef1eb237c4647742c71212e9" exitCode=0 Jan 21 17:43:07 crc kubenswrapper[4799]: I0121 17:43:07.706349 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" event={"ID":"f60e6624-ebc7-4a4c-9751-8329c56bcb14","Type":"ContainerDied","Data":"dcb9b29457399e1b9f8a096ab334bab86d43128fef1eb237c4647742c71212e9"} Jan 21 17:43:08 crc kubenswrapper[4799]: I0121 17:43:08.214109 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="6770819e-2fef-4203-9c5f-504628af7b66" path="/var/lib/kubelet/pods/6770819e-2fef-4203-9c5f-504628af7b66/volumes" Jan 21 17:43:08 crc kubenswrapper[4799]: I0121 17:43:08.718700 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" event={"ID":"f60e6624-ebc7-4a4c-9751-8329c56bcb14","Type":"ContainerStarted","Data":"afa8874350f1d3c7b4ce4d98e80cd41fd15dd7cc70e8bb3cd8599ae394bd5912"} Jan 21 17:43:08 crc kubenswrapper[4799]: I0121 17:43:08.718788 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" event={"ID":"f60e6624-ebc7-4a4c-9751-8329c56bcb14","Type":"ContainerStarted","Data":"73b0bc81e24bc272b8f53e99c6507b63f29e67f173ac7df43c5541157cbb0e9c"} Jan 21 17:43:08 crc kubenswrapper[4799]: I0121 17:43:08.718807 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" event={"ID":"f60e6624-ebc7-4a4c-9751-8329c56bcb14","Type":"ContainerStarted","Data":"ea51ca373d14c848b2c3642c068e0287f5b3a69a386000730db052d20a6fc99e"} Jan 21 17:43:08 crc kubenswrapper[4799]: I0121 17:43:08.718819 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" event={"ID":"f60e6624-ebc7-4a4c-9751-8329c56bcb14","Type":"ContainerStarted","Data":"395031d21c3c53e2898a207543c3897417379f509de8a4d078897b454d90ea13"} Jan 21 17:43:08 crc kubenswrapper[4799]: I0121 17:43:08.718849 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" event={"ID":"f60e6624-ebc7-4a4c-9751-8329c56bcb14","Type":"ContainerStarted","Data":"0355d91cf50b207c6bfcb4bae769939886d9e1d308e72ffaa789a53d5ce90bdb"} Jan 21 17:43:08 crc kubenswrapper[4799]: I0121 17:43:08.718862 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" event={"ID":"f60e6624-ebc7-4a4c-9751-8329c56bcb14","Type":"ContainerStarted","Data":"b2f7b64fad8724e7d3d23a46100f1a26a8ea28fb4276e2e825cc8bc3cb7b9797"} Jan 21 17:43:11 crc kubenswrapper[4799]: I0121 17:43:11.740650 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" event={"ID":"f60e6624-ebc7-4a4c-9751-8329c56bcb14","Type":"ContainerStarted","Data":"59b4e42adfb36f41c571aab27e17ebc0792c2e281cb67d33ea94350cbe58fc39"} Jan 21 17:43:13 crc kubenswrapper[4799]: I0121 17:43:13.759925 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" event={"ID":"f60e6624-ebc7-4a4c-9751-8329c56bcb14","Type":"ContainerStarted","Data":"3426ae4e5af220466841e4002f208a37814d29703814e20ca1e7f8027a9d8e56"} Jan 21 17:43:13 crc kubenswrapper[4799]: I0121 17:43:13.760846 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:13 crc kubenswrapper[4799]: I0121 17:43:13.760919 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:13 crc kubenswrapper[4799]: I0121 17:43:13.795207 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:13 crc kubenswrapper[4799]: I0121 17:43:13.808571 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" podStartSLOduration=7.808541978 podStartE2EDuration="7.808541978s" podCreationTimestamp="2026-01-21 17:43:06 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:43:13.806502209 +0000 UTC m=+620.432792232" watchObservedRunningTime="2026-01-21 17:43:13.808541978 +0000 UTC m=+620.434832011" Jan 21 17:43:14 crc kubenswrapper[4799]: I0121 17:43:14.766728 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:14 crc kubenswrapper[4799]: I0121 17:43:14.800798 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:21 crc kubenswrapper[4799]: I0121 17:43:21.205522 4799 scope.go:117] "RemoveContainer" containerID="6c3bc39cc19c866dd40455a982701ba69abfab4ec1850efa28878c9028541555" Jan 21 17:43:21 crc kubenswrapper[4799]: E0121 17:43:21.206158 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-sl7lv_openshift-multus(3004f2e1-bd6a-46a1-a6d9-835472f616b8)\"" pod="openshift-multus/multus-sl7lv" podUID="3004f2e1-bd6a-46a1-a6d9-835472f616b8" Jan 21 17:43:25 crc kubenswrapper[4799]: I0121 17:43:25.970784 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:43:25 crc kubenswrapper[4799]: I0121 17:43:25.971254 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:43:35 crc kubenswrapper[4799]: I0121 17:43:35.205854 4799 scope.go:117] "RemoveContainer" containerID="6c3bc39cc19c866dd40455a982701ba69abfab4ec1850efa28878c9028541555" Jan 21 17:43:35 crc kubenswrapper[4799]: I0121 17:43:35.928851 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-sl7lv_3004f2e1-bd6a-46a1-a6d9-835472f616b8/kube-multus/2.log" Jan 21 17:43:35 crc kubenswrapper[4799]: I0121 17:43:35.929483 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-sl7lv" event={"ID":"3004f2e1-bd6a-46a1-a6d9-835472f616b8","Type":"ContainerStarted","Data":"50b49b22d84344342f1d35bb450c0542687eb1ced0316d25c3aaa268f444065d"} Jan 21 17:43:36 crc kubenswrapper[4799]: I0121 17:43:36.710061 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-s58mz" Jan 21 17:43:38 crc kubenswrapper[4799]: I0121 17:43:38.759118 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr"] Jan 21 17:43:38 crc kubenswrapper[4799]: I0121 17:43:38.760919 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" Jan 21 17:43:38 crc kubenswrapper[4799]: I0121 17:43:38.763656 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 21 17:43:38 crc kubenswrapper[4799]: I0121 17:43:38.770324 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr"] Jan 21 17:43:38 crc kubenswrapper[4799]: I0121 17:43:38.916298 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86sgt\" (UniqueName: \"kubernetes.io/projected/7941bf3d-097c-45f0-a09c-9514ab8f672d-kube-api-access-86sgt\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr\" (UID: \"7941bf3d-097c-45f0-a09c-9514ab8f672d\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" Jan 21 17:43:38 crc kubenswrapper[4799]: I0121 17:43:38.917093 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7941bf3d-097c-45f0-a09c-9514ab8f672d-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr\" (UID: \"7941bf3d-097c-45f0-a09c-9514ab8f672d\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" Jan 21 17:43:38 crc kubenswrapper[4799]: I0121 17:43:38.917344 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7941bf3d-097c-45f0-a09c-9514ab8f672d-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr\" (UID: \"7941bf3d-097c-45f0-a09c-9514ab8f672d\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" Jan 21 17:43:39 crc kubenswrapper[4799]: I0121 17:43:39.018744 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7941bf3d-097c-45f0-a09c-9514ab8f672d-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr\" (UID: \"7941bf3d-097c-45f0-a09c-9514ab8f672d\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" Jan 21 17:43:39 crc kubenswrapper[4799]: I0121 17:43:39.019100 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86sgt\" (UniqueName: \"kubernetes.io/projected/7941bf3d-097c-45f0-a09c-9514ab8f672d-kube-api-access-86sgt\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr\" (UID: \"7941bf3d-097c-45f0-a09c-9514ab8f672d\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" Jan 21 17:43:39 crc kubenswrapper[4799]: I0121 17:43:39.019314 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7941bf3d-097c-45f0-a09c-9514ab8f672d-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr\" (UID: \"7941bf3d-097c-45f0-a09c-9514ab8f672d\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" Jan 21 17:43:39 crc kubenswrapper[4799]: I0121 17:43:39.019428 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/7941bf3d-097c-45f0-a09c-9514ab8f672d-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr\" (UID: \"7941bf3d-097c-45f0-a09c-9514ab8f672d\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" Jan 21 17:43:39 crc kubenswrapper[4799]: I0121 17:43:39.019897 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7941bf3d-097c-45f0-a09c-9514ab8f672d-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr\" (UID: \"7941bf3d-097c-45f0-a09c-9514ab8f672d\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" Jan 21 17:43:39 crc kubenswrapper[4799]: I0121 17:43:39.044413 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86sgt\" (UniqueName: \"kubernetes.io/projected/7941bf3d-097c-45f0-a09c-9514ab8f672d-kube-api-access-86sgt\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr\" (UID: \"7941bf3d-097c-45f0-a09c-9514ab8f672d\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" Jan 21 17:43:39 crc kubenswrapper[4799]: I0121 17:43:39.082478 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" Jan 21 17:43:39 crc kubenswrapper[4799]: I0121 17:43:39.326911 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr"] Jan 21 17:43:39 crc kubenswrapper[4799]: W0121 17:43:39.334756 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7941bf3d_097c_45f0_a09c_9514ab8f672d.slice/crio-9a6d86f78ea18dc59811e664b3b7a45a56f5296c849606ca551eee0a97302329 WatchSource:0}: Error finding container 9a6d86f78ea18dc59811e664b3b7a45a56f5296c849606ca551eee0a97302329: Status 404 returned error can't find the container with id 9a6d86f78ea18dc59811e664b3b7a45a56f5296c849606ca551eee0a97302329 Jan 21 17:43:39 crc kubenswrapper[4799]: I0121 17:43:39.960251 4799 generic.go:334] "Generic (PLEG): container finished" podID="7941bf3d-097c-45f0-a09c-9514ab8f672d" containerID="ece75dd671807684d4e6e6c75547abfd3fc08d062064316f2c5e21c6790dcf21" exitCode=0 Jan 21 17:43:39 crc kubenswrapper[4799]: I0121 17:43:39.960344 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" event={"ID":"7941bf3d-097c-45f0-a09c-9514ab8f672d","Type":"ContainerDied","Data":"ece75dd671807684d4e6e6c75547abfd3fc08d062064316f2c5e21c6790dcf21"} Jan 21 17:43:39 crc kubenswrapper[4799]: I0121 17:43:39.960742 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" event={"ID":"7941bf3d-097c-45f0-a09c-9514ab8f672d","Type":"ContainerStarted","Data":"9a6d86f78ea18dc59811e664b3b7a45a56f5296c849606ca551eee0a97302329"} Jan 21 17:43:43 crc kubenswrapper[4799]: I0121 17:43:43.987255 4799 generic.go:334] "Generic (PLEG): container finished" podID="7941bf3d-097c-45f0-a09c-9514ab8f672d" containerID="257fd2a9bf7008f82dd430ed1572ee6939a24df6881ec78e9449fb69c1f44e81" exitCode=0 Jan 21 17:43:43 crc kubenswrapper[4799]: I0121 17:43:43.987403 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" event={"ID":"7941bf3d-097c-45f0-a09c-9514ab8f672d","Type":"ContainerDied","Data":"257fd2a9bf7008f82dd430ed1572ee6939a24df6881ec78e9449fb69c1f44e81"} Jan 21 17:43:44 crc kubenswrapper[4799]: I0121 17:43:44.997305 4799 generic.go:334] "Generic (PLEG): container finished" podID="7941bf3d-097c-45f0-a09c-9514ab8f672d" containerID="db115f4e466a271786343d1aec751b80dc9e295a3969ee543e4830f5ae381f55" exitCode=0 Jan 21 17:43:44 crc kubenswrapper[4799]: I0121 17:43:44.997411 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" event={"ID":"7941bf3d-097c-45f0-a09c-9514ab8f672d","Type":"ContainerDied","Data":"db115f4e466a271786343d1aec751b80dc9e295a3969ee543e4830f5ae381f55"} Jan 21 17:43:47 crc kubenswrapper[4799]: I0121 17:43:47.123033 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" Jan 21 17:43:47 crc kubenswrapper[4799]: I0121 17:43:47.244626 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7941bf3d-097c-45f0-a09c-9514ab8f672d-bundle\") pod \"7941bf3d-097c-45f0-a09c-9514ab8f672d\" (UID: \"7941bf3d-097c-45f0-a09c-9514ab8f672d\") " Jan 21 17:43:47 crc kubenswrapper[4799]: I0121 17:43:47.244720 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86sgt\" (UniqueName: \"kubernetes.io/projected/7941bf3d-097c-45f0-a09c-9514ab8f672d-kube-api-access-86sgt\") pod \"7941bf3d-097c-45f0-a09c-9514ab8f672d\" (UID: \"7941bf3d-097c-45f0-a09c-9514ab8f672d\") " Jan 21 17:43:47 crc kubenswrapper[4799]: I0121 17:43:47.244786 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7941bf3d-097c-45f0-a09c-9514ab8f672d-util\") pod \"7941bf3d-097c-45f0-a09c-9514ab8f672d\" (UID: \"7941bf3d-097c-45f0-a09c-9514ab8f672d\") " Jan 21 17:43:47 crc kubenswrapper[4799]: I0121 17:43:47.247429 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7941bf3d-097c-45f0-a09c-9514ab8f672d-bundle" (OuterVolumeSpecName: "bundle") pod "7941bf3d-097c-45f0-a09c-9514ab8f672d" (UID: "7941bf3d-097c-45f0-a09c-9514ab8f672d"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:43:47 crc kubenswrapper[4799]: I0121 17:43:47.251607 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7941bf3d-097c-45f0-a09c-9514ab8f672d-kube-api-access-86sgt" (OuterVolumeSpecName: "kube-api-access-86sgt") pod "7941bf3d-097c-45f0-a09c-9514ab8f672d" (UID: "7941bf3d-097c-45f0-a09c-9514ab8f672d"). InnerVolumeSpecName "kube-api-access-86sgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:43:47 crc kubenswrapper[4799]: I0121 17:43:47.256996 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7941bf3d-097c-45f0-a09c-9514ab8f672d-util" (OuterVolumeSpecName: "util") pod "7941bf3d-097c-45f0-a09c-9514ab8f672d" (UID: "7941bf3d-097c-45f0-a09c-9514ab8f672d"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:43:47 crc kubenswrapper[4799]: I0121 17:43:47.346366 4799 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7941bf3d-097c-45f0-a09c-9514ab8f672d-util\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:47 crc kubenswrapper[4799]: I0121 17:43:47.346824 4799 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7941bf3d-097c-45f0-a09c-9514ab8f672d-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:47 crc kubenswrapper[4799]: I0121 17:43:47.346903 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86sgt\" (UniqueName: \"kubernetes.io/projected/7941bf3d-097c-45f0-a09c-9514ab8f672d-kube-api-access-86sgt\") on node \"crc\" DevicePath \"\"" Jan 21 17:43:48 crc kubenswrapper[4799]: I0121 17:43:48.042322 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" event={"ID":"7941bf3d-097c-45f0-a09c-9514ab8f672d","Type":"ContainerDied","Data":"9a6d86f78ea18dc59811e664b3b7a45a56f5296c849606ca551eee0a97302329"} Jan 21 17:43:48 crc kubenswrapper[4799]: I0121 17:43:48.042381 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a6d86f78ea18dc59811e664b3b7a45a56f5296c849606ca551eee0a97302329" Jan 21 17:43:48 crc kubenswrapper[4799]: I0121 17:43:48.042416 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr" Jan 21 17:43:56 crc kubenswrapper[4799]: I0121 17:43:55.990200 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:43:56 crc kubenswrapper[4799]: I0121 17:43:55.991117 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:43:56 crc kubenswrapper[4799]: I0121 17:43:55.991203 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 17:43:56 crc kubenswrapper[4799]: I0121 17:43:55.991972 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cf754122da61833aa1525f3575372a725cd96a25aa66c0876c3c4f82026fd7ab"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 17:43:56 crc kubenswrapper[4799]: I0121 17:43:55.992028 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://cf754122da61833aa1525f3575372a725cd96a25aa66c0876c3c4f82026fd7ab" gracePeriod=600 Jan 21 17:43:57 crc kubenswrapper[4799]: I0121 17:43:57.207349 4799 generic.go:334] "Generic (PLEG): container finished" 
podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="cf754122da61833aa1525f3575372a725cd96a25aa66c0876c3c4f82026fd7ab" exitCode=0 Jan 21 17:43:57 crc kubenswrapper[4799]: I0121 17:43:57.207443 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"cf754122da61833aa1525f3575372a725cd96a25aa66c0876c3c4f82026fd7ab"} Jan 21 17:43:57 crc kubenswrapper[4799]: I0121 17:43:57.207543 4799 scope.go:117] "RemoveContainer" containerID="b6462558f849eafd1973a1b2319347dad0de9388ecab61e98f6ea685f2b55daa" Jan 21 17:43:58 crc kubenswrapper[4799]: I0121 17:43:58.465924 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"eed6e35e0dd567b7136adb6f803c960c31a5e8beac68fc922967bfc8623a01c5"} Jan 21 17:44:00 crc kubenswrapper[4799]: I0121 17:44:00.931399 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-58w9k"] Jan 21 17:44:00 crc kubenswrapper[4799]: E0121 17:44:00.932456 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7941bf3d-097c-45f0-a09c-9514ab8f672d" containerName="pull" Jan 21 17:44:00 crc kubenswrapper[4799]: I0121 17:44:00.932472 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7941bf3d-097c-45f0-a09c-9514ab8f672d" containerName="pull" Jan 21 17:44:00 crc kubenswrapper[4799]: E0121 17:44:00.932483 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7941bf3d-097c-45f0-a09c-9514ab8f672d" containerName="util" Jan 21 17:44:00 crc kubenswrapper[4799]: I0121 17:44:00.932490 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7941bf3d-097c-45f0-a09c-9514ab8f672d" containerName="util" Jan 21 17:44:00 crc kubenswrapper[4799]: E0121 17:44:00.932506 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7941bf3d-097c-45f0-a09c-9514ab8f672d" containerName="extract" Jan 21 17:44:00 crc kubenswrapper[4799]: I0121 17:44:00.932512 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7941bf3d-097c-45f0-a09c-9514ab8f672d" containerName="extract" Jan 21 17:44:00 crc kubenswrapper[4799]: I0121 17:44:00.932632 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7941bf3d-097c-45f0-a09c-9514ab8f672d" containerName="extract" Jan 21 17:44:00 crc kubenswrapper[4799]: I0121 17:44:00.933138 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-58w9k" Jan 21 17:44:00 crc kubenswrapper[4799]: I0121 17:44:00.940180 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Jan 21 17:44:00 crc kubenswrapper[4799]: I0121 17:44:00.940814 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Jan 21 17:44:00 crc kubenswrapper[4799]: I0121 17:44:00.940963 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-jtl6d" Jan 21 17:44:00 crc kubenswrapper[4799]: I0121 17:44:00.952518 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-58w9k"] Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.081653 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf"] Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.082570 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.085534 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.085687 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-7nxrm" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.098317 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf"] Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.105783 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t"] Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.106658 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.132340 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b56cc\" (UniqueName: \"kubernetes.io/projected/d743d591-b616-4e57-8395-ef3565083899-kube-api-access-b56cc\") pod \"obo-prometheus-operator-68bc856cb9-58w9k\" (UID: \"d743d591-b616-4e57-8395-ef3565083899\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-58w9k" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.150166 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t"] Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.233349 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b56cc\" (UniqueName: \"kubernetes.io/projected/d743d591-b616-4e57-8395-ef3565083899-kube-api-access-b56cc\") pod \"obo-prometheus-operator-68bc856cb9-58w9k\" (UID: \"d743d591-b616-4e57-8395-ef3565083899\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-58w9k" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.233468 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/44710bca-2659-43a9-9454-e12123e0c965-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf\" (UID: \"44710bca-2659-43a9-9454-e12123e0c965\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.233528 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/77f855af-53b1-4152-bbff-c818ffa1e32e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t\" (UID: \"77f855af-53b1-4152-bbff-c818ffa1e32e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.233577 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/77f855af-53b1-4152-bbff-c818ffa1e32e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t\" (UID: \"77f855af-53b1-4152-bbff-c818ffa1e32e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.234117 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/44710bca-2659-43a9-9454-e12123e0c965-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf\" (UID: \"44710bca-2659-43a9-9454-e12123e0c965\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.266757 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-xrwhp"] Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.267619 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-xrwhp" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.273865 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.274070 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-87flp" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.275176 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b56cc\" (UniqueName: \"kubernetes.io/projected/d743d591-b616-4e57-8395-ef3565083899-kube-api-access-b56cc\") pod \"obo-prometheus-operator-68bc856cb9-58w9k\" (UID: \"d743d591-b616-4e57-8395-ef3565083899\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-58w9k" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.293426 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-xrwhp"] Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.335502 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/77f855af-53b1-4152-bbff-c818ffa1e32e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t\" (UID: \"77f855af-53b1-4152-bbff-c818ffa1e32e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.335573 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/44710bca-2659-43a9-9454-e12123e0c965-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf\" (UID: \"44710bca-2659-43a9-9454-e12123e0c965\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.335705 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/44710bca-2659-43a9-9454-e12123e0c965-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf\" (UID: \"44710bca-2659-43a9-9454-e12123e0c965\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.335745 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/77f855af-53b1-4152-bbff-c818ffa1e32e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t\" (UID: \"77f855af-53b1-4152-bbff-c818ffa1e32e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.345004 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/44710bca-2659-43a9-9454-e12123e0c965-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf\" (UID: \"44710bca-2659-43a9-9454-e12123e0c965\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.345067 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/44710bca-2659-43a9-9454-e12123e0c965-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf\" (UID: \"44710bca-2659-43a9-9454-e12123e0c965\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.346110 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/77f855af-53b1-4152-bbff-c818ffa1e32e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t\" (UID: \"77f855af-53b1-4152-bbff-c818ffa1e32e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.351829 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/77f855af-53b1-4152-bbff-c818ffa1e32e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t\" (UID: \"77f855af-53b1-4152-bbff-c818ffa1e32e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.401516 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.425544 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.437207 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfqmp\" (UniqueName: \"kubernetes.io/projected/d38deaee-a893-47a5-b3d5-c1ea392a894b-kube-api-access-zfqmp\") pod \"observability-operator-59bdc8b94-xrwhp\" (UID: \"d38deaee-a893-47a5-b3d5-c1ea392a894b\") " pod="openshift-operators/observability-operator-59bdc8b94-xrwhp" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.437292 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/d38deaee-a893-47a5-b3d5-c1ea392a894b-observability-operator-tls\") pod \"observability-operator-59bdc8b94-xrwhp\" (UID: \"d38deaee-a893-47a5-b3d5-c1ea392a894b\") " pod="openshift-operators/observability-operator-59bdc8b94-xrwhp" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.441766 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-w5tlc"] Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.442802 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-w5tlc" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.446357 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-f8lfn" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.466089 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-w5tlc"] Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.540909 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfqmp\" (UniqueName: \"kubernetes.io/projected/d38deaee-a893-47a5-b3d5-c1ea392a894b-kube-api-access-zfqmp\") pod \"observability-operator-59bdc8b94-xrwhp\" (UID: \"d38deaee-a893-47a5-b3d5-c1ea392a894b\") " pod="openshift-operators/observability-operator-59bdc8b94-xrwhp" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.540996 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/d38deaee-a893-47a5-b3d5-c1ea392a894b-observability-operator-tls\") pod \"observability-operator-59bdc8b94-xrwhp\" (UID: \"d38deaee-a893-47a5-b3d5-c1ea392a894b\") " pod="openshift-operators/observability-operator-59bdc8b94-xrwhp" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.551632 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/d38deaee-a893-47a5-b3d5-c1ea392a894b-observability-operator-tls\") pod \"observability-operator-59bdc8b94-xrwhp\" (UID: \"d38deaee-a893-47a5-b3d5-c1ea392a894b\") " pod="openshift-operators/observability-operator-59bdc8b94-xrwhp" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.573625 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-58w9k" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.574201 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfqmp\" (UniqueName: \"kubernetes.io/projected/d38deaee-a893-47a5-b3d5-c1ea392a894b-kube-api-access-zfqmp\") pod \"observability-operator-59bdc8b94-xrwhp\" (UID: \"d38deaee-a893-47a5-b3d5-c1ea392a894b\") " pod="openshift-operators/observability-operator-59bdc8b94-xrwhp" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.606360 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-xrwhp" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.756147 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9hzc\" (UniqueName: \"kubernetes.io/projected/3202bb55-0262-452a-9cfe-93088a43c767-kube-api-access-s9hzc\") pod \"perses-operator-5bf474d74f-w5tlc\" (UID: \"3202bb55-0262-452a-9cfe-93088a43c767\") " pod="openshift-operators/perses-operator-5bf474d74f-w5tlc" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.756219 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/3202bb55-0262-452a-9cfe-93088a43c767-openshift-service-ca\") pod \"perses-operator-5bf474d74f-w5tlc\" (UID: \"3202bb55-0262-452a-9cfe-93088a43c767\") " pod="openshift-operators/perses-operator-5bf474d74f-w5tlc" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.858245 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9hzc\" (UniqueName: \"kubernetes.io/projected/3202bb55-0262-452a-9cfe-93088a43c767-kube-api-access-s9hzc\") pod \"perses-operator-5bf474d74f-w5tlc\" (UID: \"3202bb55-0262-452a-9cfe-93088a43c767\") " pod="openshift-operators/perses-operator-5bf474d74f-w5tlc" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.858347 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/3202bb55-0262-452a-9cfe-93088a43c767-openshift-service-ca\") pod \"perses-operator-5bf474d74f-w5tlc\" (UID: \"3202bb55-0262-452a-9cfe-93088a43c767\") " pod="openshift-operators/perses-operator-5bf474d74f-w5tlc" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.859827 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/3202bb55-0262-452a-9cfe-93088a43c767-openshift-service-ca\") pod \"perses-operator-5bf474d74f-w5tlc\" (UID: \"3202bb55-0262-452a-9cfe-93088a43c767\") " pod="openshift-operators/perses-operator-5bf474d74f-w5tlc" Jan 21 17:44:01 crc kubenswrapper[4799]: I0121 17:44:01.887033 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9hzc\" (UniqueName: \"kubernetes.io/projected/3202bb55-0262-452a-9cfe-93088a43c767-kube-api-access-s9hzc\") pod \"perses-operator-5bf474d74f-w5tlc\" (UID: \"3202bb55-0262-452a-9cfe-93088a43c767\") " pod="openshift-operators/perses-operator-5bf474d74f-w5tlc" Jan 21 17:44:02 crc kubenswrapper[4799]: I0121 17:44:02.100776 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-w5tlc" Jan 21 17:44:02 crc kubenswrapper[4799]: I0121 17:44:02.969271 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t"] Jan 21 17:44:03 crc kubenswrapper[4799]: I0121 17:44:03.054831 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-58w9k"] Jan 21 17:44:03 crc kubenswrapper[4799]: I0121 17:44:03.096045 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf"] Jan 21 17:44:03 crc kubenswrapper[4799]: I0121 17:44:03.111050 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-xrwhp"] Jan 21 17:44:03 crc kubenswrapper[4799]: W0121 17:44:03.113531 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod44710bca_2659_43a9_9454_e12123e0c965.slice/crio-cb3eb8883c1d833d12dabb351e1a9081496ae3141f1f5057666dcf86fd5b525f WatchSource:0}: Error finding container cb3eb8883c1d833d12dabb351e1a9081496ae3141f1f5057666dcf86fd5b525f: Status 404 returned error can't find the container with id cb3eb8883c1d833d12dabb351e1a9081496ae3141f1f5057666dcf86fd5b525f Jan 21 17:44:03 crc kubenswrapper[4799]: W0121 17:44:03.122141 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd38deaee_a893_47a5_b3d5_c1ea392a894b.slice/crio-550be56d18f469106064214b5ed25bae4a649d4f79eb14fd5a21f3f3343dfde4 WatchSource:0}: Error finding container 550be56d18f469106064214b5ed25bae4a649d4f79eb14fd5a21f3f3343dfde4: Status 404 returned error can't find the container with id 550be56d18f469106064214b5ed25bae4a649d4f79eb14fd5a21f3f3343dfde4 Jan 21 17:44:03 crc kubenswrapper[4799]: I0121 17:44:03.175972 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-w5tlc"] Jan 21 17:44:03 crc kubenswrapper[4799]: W0121 17:44:03.198728 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3202bb55_0262_452a_9cfe_93088a43c767.slice/crio-90e3342d03e0090f076e5cbd3b4a3620e67acb59ced2d49dc8da41bdad9c55d9 WatchSource:0}: Error finding container 90e3342d03e0090f076e5cbd3b4a3620e67acb59ced2d49dc8da41bdad9c55d9: Status 404 returned error can't find the container with id 90e3342d03e0090f076e5cbd3b4a3620e67acb59ced2d49dc8da41bdad9c55d9 Jan 21 17:44:03 crc kubenswrapper[4799]: I0121 17:44:03.609988 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-xrwhp" event={"ID":"d38deaee-a893-47a5-b3d5-c1ea392a894b","Type":"ContainerStarted","Data":"550be56d18f469106064214b5ed25bae4a649d4f79eb14fd5a21f3f3343dfde4"} Jan 21 17:44:03 crc kubenswrapper[4799]: I0121 17:44:03.616260 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-w5tlc" event={"ID":"3202bb55-0262-452a-9cfe-93088a43c767","Type":"ContainerStarted","Data":"90e3342d03e0090f076e5cbd3b4a3620e67acb59ced2d49dc8da41bdad9c55d9"} Jan 21 17:44:03 crc kubenswrapper[4799]: I0121 17:44:03.620962 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-58w9k" 
event={"ID":"d743d591-b616-4e57-8395-ef3565083899","Type":"ContainerStarted","Data":"aa5c912daf9f592ad8a214a332d2b231fc7d86e40bcb6c271b466d29ee3cbbe1"} Jan 21 17:44:03 crc kubenswrapper[4799]: I0121 17:44:03.624544 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf" event={"ID":"44710bca-2659-43a9-9454-e12123e0c965","Type":"ContainerStarted","Data":"cb3eb8883c1d833d12dabb351e1a9081496ae3141f1f5057666dcf86fd5b525f"} Jan 21 17:44:03 crc kubenswrapper[4799]: I0121 17:44:03.626769 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t" event={"ID":"77f855af-53b1-4152-bbff-c818ffa1e32e","Type":"ContainerStarted","Data":"62773dcf0f6616d05bab9407cc9ae5a5df54dbf8d42192426f387fbb7d5196d0"} Jan 21 17:44:23 crc kubenswrapper[4799]: E0121 17:44:23.662488 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea" Jan 21 17:44:23 crc kubenswrapper[4799]: E0121 17:44:23.663669 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t_openshift-operators(77f855af-53b1-4152-bbff-c818ffa1e32e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 17:44:23 
crc kubenswrapper[4799]: E0121 17:44:23.664833 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t" podUID="77f855af-53b1-4152-bbff-c818ffa1e32e" Jan 21 17:44:24 crc kubenswrapper[4799]: E0121 17:44:24.127331 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t" podUID="77f855af-53b1-4152-bbff-c818ffa1e32e" Jan 21 17:44:26 crc kubenswrapper[4799]: E0121 17:44:26.441182 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e7e5f4c5e8ab0ba298ef0295a7137d438a42eb177d9322212cde6ba8f367912a" Jan 21 17:44:26 crc kubenswrapper[4799]: E0121 17:44:26.441480 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e7e5f4c5e8ab0ba298ef0295a7137d438a42eb177d9322212cde6ba8f367912a,Command:[],Args:[--prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator --watch-referenced-objects-in-all-namespaces=true --disable-unmanaged-prometheus-configuration=true],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOGC,Value:30,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER,Value:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:9a2097bc5b2e02bc1703f64c452ce8fe4bc6775b732db930ff4770b76ae4653a,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{157286400 0} {} 150Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-b56cc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-68bc856cb9-58w9k_openshift-operators(d743d591-b616-4e57-8395-ef3565083899): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 17:44:26 crc kubenswrapper[4799]: E0121 17:44:26.442707 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-58w9k" podUID="d743d591-b616-4e57-8395-ef3565083899" Jan 21 17:44:26 crc kubenswrapper[4799]: E0121 17:44:26.493975 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea" Jan 21 17:44:26 crc kubenswrapper[4799]: E0121 17:44:26.494220 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf_openshift-operators(44710bca-2659-43a9-9454-e12123e0c965): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 17:44:26 crc kubenswrapper[4799]: E0121 17:44:26.495492 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf" podUID="44710bca-2659-43a9-9454-e12123e0c965" Jan 21 17:44:27 crc kubenswrapper[4799]: E0121 17:44:27.151266 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf" podUID="44710bca-2659-43a9-9454-e12123e0c965" Jan 21 17:44:27 crc kubenswrapper[4799]: E0121 17:44:27.151488 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e7e5f4c5e8ab0ba298ef0295a7137d438a42eb177d9322212cde6ba8f367912a\\\"\"" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-58w9k" podUID="d743d591-b616-4e57-8395-ef3565083899" Jan 21 17:44:27 crc kubenswrapper[4799]: E0121 17:44:27.457518 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:b5c8526d2ae660fe092dd8a7acf18ec4957d5c265890a222f55396fc2cdaeed8" Jan 21 17:44:27 crc kubenswrapper[4799]: E0121 17:44:27.458120 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:perses-operator,Image:registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:b5c8526d2ae660fe092dd8a7acf18ec4957d5c265890a222f55396fc2cdaeed8,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openshift-service-ca,ReadOnly:true,MountPath:/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s9hzc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod perses-operator-5bf474d74f-w5tlc_openshift-operators(3202bb55-0262-452a-9cfe-93088a43c767): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 17:44:27 crc kubenswrapper[4799]: E0121 17:44:27.459411 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/perses-operator-5bf474d74f-w5tlc" podUID="3202bb55-0262-452a-9cfe-93088a43c767" Jan 21 17:44:28 crc kubenswrapper[4799]: I0121 17:44:28.157178 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-xrwhp" event={"ID":"d38deaee-a893-47a5-b3d5-c1ea392a894b","Type":"ContainerStarted","Data":"8e258780d075ac51d60b121d9348848c3b6e53eed63779376e8482cb777b7275"} Jan 21 17:44:28 crc kubenswrapper[4799]: E0121 17:44:28.159601 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:b5c8526d2ae660fe092dd8a7acf18ec4957d5c265890a222f55396fc2cdaeed8\\\"\"" 
pod="openshift-operators/perses-operator-5bf474d74f-w5tlc" podUID="3202bb55-0262-452a-9cfe-93088a43c767" Jan 21 17:44:28 crc kubenswrapper[4799]: I0121 17:44:28.223004 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-59bdc8b94-xrwhp" podStartSLOduration=2.883927791 podStartE2EDuration="27.222971196s" podCreationTimestamp="2026-01-21 17:44:01 +0000 UTC" firstStartedPulling="2026-01-21 17:44:03.137246043 +0000 UTC m=+669.763536066" lastFinishedPulling="2026-01-21 17:44:27.476289448 +0000 UTC m=+694.102579471" observedRunningTime="2026-01-21 17:44:28.217596753 +0000 UTC m=+694.843886786" watchObservedRunningTime="2026-01-21 17:44:28.222971196 +0000 UTC m=+694.849261219" Jan 21 17:44:29 crc kubenswrapper[4799]: I0121 17:44:29.471036 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-59bdc8b94-xrwhp" Jan 21 17:44:29 crc kubenswrapper[4799]: I0121 17:44:29.475396 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-59bdc8b94-xrwhp" Jan 21 17:44:41 crc kubenswrapper[4799]: I0121 17:44:41.759665 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf" event={"ID":"44710bca-2659-43a9-9454-e12123e0c965","Type":"ContainerStarted","Data":"b7519cf85f75b4083eafbc3d04f2f45f85bdd0768eb6d5fd67d83cc77c8fbfe2"} Jan 21 17:44:41 crc kubenswrapper[4799]: I0121 17:44:41.786622 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf" podStartSLOduration=3.5240806449999997 podStartE2EDuration="40.786591488s" podCreationTimestamp="2026-01-21 17:44:01 +0000 UTC" firstStartedPulling="2026-01-21 17:44:03.122379598 +0000 UTC m=+669.748669621" lastFinishedPulling="2026-01-21 17:44:40.384890441 +0000 UTC m=+707.011180464" observedRunningTime="2026-01-21 17:44:41.782576553 +0000 UTC m=+708.408866586" watchObservedRunningTime="2026-01-21 17:44:41.786591488 +0000 UTC m=+708.412881521" Jan 21 17:44:43 crc kubenswrapper[4799]: I0121 17:44:43.854477 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t" event={"ID":"77f855af-53b1-4152-bbff-c818ffa1e32e","Type":"ContainerStarted","Data":"1dd012fcc9d97ab70ea68420a1b0f5f9c24fa0cb718b03b0d19757c97995cd25"} Jan 21 17:44:43 crc kubenswrapper[4799]: I0121 17:44:43.858192 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-58w9k" event={"ID":"d743d591-b616-4e57-8395-ef3565083899","Type":"ContainerStarted","Data":"3d66fdf1e83ac4b2457502dc4aee0f0128a02c9ba8a7f1f52b6fdb6200a8d9a3"} Jan 21 17:44:43 crc kubenswrapper[4799]: I0121 17:44:43.861053 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-w5tlc" event={"ID":"3202bb55-0262-452a-9cfe-93088a43c767","Type":"ContainerStarted","Data":"55b2c8c3ddd6c318a302f33999c4dd3ad242adb725e9842ab58637b89b811a01"} Jan 21 17:44:43 crc kubenswrapper[4799]: I0121 17:44:43.861690 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5bf474d74f-w5tlc" Jan 21 17:44:43 crc kubenswrapper[4799]: I0121 17:44:43.877075 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t" podStartSLOduration=3.306381293 podStartE2EDuration="42.877045047s" podCreationTimestamp="2026-01-21 17:44:01 +0000 UTC" firstStartedPulling="2026-01-21 17:44:02.992444545 +0000 UTC m=+669.618734568" lastFinishedPulling="2026-01-21 17:44:42.563108299 +0000 UTC m=+709.189398322" observedRunningTime="2026-01-21 17:44:43.874090392 +0000 UTC m=+710.500380415" watchObservedRunningTime="2026-01-21 17:44:43.877045047 +0000 UTC m=+710.503335060" Jan 21 17:44:44 crc kubenswrapper[4799]: I0121 17:44:44.169509 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-58w9k" podStartSLOduration=4.693371912 podStartE2EDuration="44.169480434s" podCreationTimestamp="2026-01-21 17:44:00 +0000 UTC" firstStartedPulling="2026-01-21 17:44:03.084753393 +0000 UTC m=+669.711043416" lastFinishedPulling="2026-01-21 17:44:42.560861915 +0000 UTC m=+709.187151938" observedRunningTime="2026-01-21 17:44:44.16933489 +0000 UTC m=+710.795624913" watchObservedRunningTime="2026-01-21 17:44:44.169480434 +0000 UTC m=+710.795770457" Jan 21 17:44:44 crc kubenswrapper[4799]: I0121 17:44:44.229472 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5bf474d74f-w5tlc" podStartSLOduration=3.87280137 podStartE2EDuration="43.229451248s" podCreationTimestamp="2026-01-21 17:44:01 +0000 UTC" firstStartedPulling="2026-01-21 17:44:03.204298789 +0000 UTC m=+669.830588812" lastFinishedPulling="2026-01-21 17:44:42.560948667 +0000 UTC m=+709.187238690" observedRunningTime="2026-01-21 17:44:44.218435403 +0000 UTC m=+710.844725416" watchObservedRunningTime="2026-01-21 17:44:44.229451248 +0000 UTC m=+710.855741271" Jan 21 17:44:52 crc kubenswrapper[4799]: I0121 17:44:52.104941 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5bf474d74f-w5tlc" Jan 21 17:44:54 crc kubenswrapper[4799]: I0121 17:44:54.473438 4799 scope.go:117] "RemoveContainer" containerID="8b9f373f0eb4d712ddcd115818000626281c065437118238894465f58d15cb8f" Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.182180 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8"] Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.184008 4799 util.go:30] "No sandbox for pod can be found. 
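Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8"

The latency-tracker entries carry enough data to check their own arithmetic. For observability-operator-59bdc8b94-xrwhp above, podStartE2EDuration (27.222971196s) is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration (2.883927791) is that same span minus the image-pull window (firstStartedPulling to lastFinishedPulling). A sketch reproducing both numbers from the logged timestamps; the field semantics are inferred from the values themselves, not from kubelet documentation:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matching the "2026-01-21 17:44:28.222971196 +0000 UTC" form
	// used throughout this log; the fractional part is optional.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}

	created := parse("2026-01-21 17:44:01 +0000 UTC")
	firstPull := parse("2026-01-21 17:44:03.137246043 +0000 UTC")
	lastPull := parse("2026-01-21 17:44:27.476289448 +0000 UTC")
	observed := parse("2026-01-21 17:44:28.222971196 +0000 UTC")

	e2e := observed.Sub(created)         // 27.222971196s, the logged podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // 2.883927791s, the logged podStartSLOduration
	fmt.Println(e2e, slo)
}
```
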
Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.187544 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.188810 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.194847 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8"]
Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.256214 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e314447a-b8d1-465f-99fb-4f684b091913-config-volume\") pod \"collect-profiles-29483625-xnkj8\" (UID: \"e314447a-b8d1-465f-99fb-4f684b091913\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8"
Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.256300 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdldb\" (UniqueName: \"kubernetes.io/projected/e314447a-b8d1-465f-99fb-4f684b091913-kube-api-access-rdldb\") pod \"collect-profiles-29483625-xnkj8\" (UID: \"e314447a-b8d1-465f-99fb-4f684b091913\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8"
Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.256323 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e314447a-b8d1-465f-99fb-4f684b091913-secret-volume\") pod \"collect-profiles-29483625-xnkj8\" (UID: \"e314447a-b8d1-465f-99fb-4f684b091913\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8"
Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.358382 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e314447a-b8d1-465f-99fb-4f684b091913-config-volume\") pod \"collect-profiles-29483625-xnkj8\" (UID: \"e314447a-b8d1-465f-99fb-4f684b091913\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8"
Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.358911 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdldb\" (UniqueName: \"kubernetes.io/projected/e314447a-b8d1-465f-99fb-4f684b091913-kube-api-access-rdldb\") pod \"collect-profiles-29483625-xnkj8\" (UID: \"e314447a-b8d1-465f-99fb-4f684b091913\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8"
Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.358946 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e314447a-b8d1-465f-99fb-4f684b091913-secret-volume\") pod \"collect-profiles-29483625-xnkj8\" (UID: \"e314447a-b8d1-465f-99fb-4f684b091913\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8"
Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.360184 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e314447a-b8d1-465f-99fb-4f684b091913-config-volume\") pod
\"collect-profiles-29483625-xnkj8\" (UID: \"e314447a-b8d1-465f-99fb-4f684b091913\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8" Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.373571 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e314447a-b8d1-465f-99fb-4f684b091913-secret-volume\") pod \"collect-profiles-29483625-xnkj8\" (UID: \"e314447a-b8d1-465f-99fb-4f684b091913\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8" Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.382986 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdldb\" (UniqueName: \"kubernetes.io/projected/e314447a-b8d1-465f-99fb-4f684b091913-kube-api-access-rdldb\") pod \"collect-profiles-29483625-xnkj8\" (UID: \"e314447a-b8d1-465f-99fb-4f684b091913\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8" Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.585972 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8" Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.884213 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8"] Jan 21 17:45:00 crc kubenswrapper[4799]: I0121 17:45:00.999484 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8" event={"ID":"e314447a-b8d1-465f-99fb-4f684b091913","Type":"ContainerStarted","Data":"070b55dc5da2ef06f38428d8c537bfc0698cfc6a03214dbe31c4234144bf9dcf"} Jan 21 17:45:02 crc kubenswrapper[4799]: I0121 17:45:02.010446 4799 generic.go:334] "Generic (PLEG): container finished" podID="e314447a-b8d1-465f-99fb-4f684b091913" containerID="c820d5fd6effdb8bd9d0232f67599b7193a0ed0ec2ddd9613513be1c3a8a8358" exitCode=0 Jan 21 17:45:02 crc kubenswrapper[4799]: I0121 17:45:02.010632 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8" event={"ID":"e314447a-b8d1-465f-99fb-4f684b091913","Type":"ContainerDied","Data":"c820d5fd6effdb8bd9d0232f67599b7193a0ed0ec2ddd9613513be1c3a8a8358"} Jan 21 17:45:03 crc kubenswrapper[4799]: I0121 17:45:03.280900 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8" Jan 21 17:45:03 crc kubenswrapper[4799]: I0121 17:45:03.329423 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdldb\" (UniqueName: \"kubernetes.io/projected/e314447a-b8d1-465f-99fb-4f684b091913-kube-api-access-rdldb\") pod \"e314447a-b8d1-465f-99fb-4f684b091913\" (UID: \"e314447a-b8d1-465f-99fb-4f684b091913\") " Jan 21 17:45:03 crc kubenswrapper[4799]: I0121 17:45:03.329481 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e314447a-b8d1-465f-99fb-4f684b091913-config-volume\") pod \"e314447a-b8d1-465f-99fb-4f684b091913\" (UID: \"e314447a-b8d1-465f-99fb-4f684b091913\") " Jan 21 17:45:03 crc kubenswrapper[4799]: I0121 17:45:03.329533 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e314447a-b8d1-465f-99fb-4f684b091913-secret-volume\") pod \"e314447a-b8d1-465f-99fb-4f684b091913\" (UID: \"e314447a-b8d1-465f-99fb-4f684b091913\") " Jan 21 17:45:03 crc kubenswrapper[4799]: I0121 17:45:03.330520 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e314447a-b8d1-465f-99fb-4f684b091913-config-volume" (OuterVolumeSpecName: "config-volume") pod "e314447a-b8d1-465f-99fb-4f684b091913" (UID: "e314447a-b8d1-465f-99fb-4f684b091913"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:45:03 crc kubenswrapper[4799]: I0121 17:45:03.337809 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e314447a-b8d1-465f-99fb-4f684b091913-kube-api-access-rdldb" (OuterVolumeSpecName: "kube-api-access-rdldb") pod "e314447a-b8d1-465f-99fb-4f684b091913" (UID: "e314447a-b8d1-465f-99fb-4f684b091913"). InnerVolumeSpecName "kube-api-access-rdldb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:45:03 crc kubenswrapper[4799]: I0121 17:45:03.344074 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e314447a-b8d1-465f-99fb-4f684b091913-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e314447a-b8d1-465f-99fb-4f684b091913" (UID: "e314447a-b8d1-465f-99fb-4f684b091913"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:45:03 crc kubenswrapper[4799]: I0121 17:45:03.430524 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdldb\" (UniqueName: \"kubernetes.io/projected/e314447a-b8d1-465f-99fb-4f684b091913-kube-api-access-rdldb\") on node \"crc\" DevicePath \"\"" Jan 21 17:45:03 crc kubenswrapper[4799]: I0121 17:45:03.430571 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e314447a-b8d1-465f-99fb-4f684b091913-config-volume\") on node \"crc\" DevicePath \"\"" Jan 21 17:45:03 crc kubenswrapper[4799]: I0121 17:45:03.430581 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e314447a-b8d1-465f-99fb-4f684b091913-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 21 17:45:04 crc kubenswrapper[4799]: I0121 17:45:04.025344 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8" event={"ID":"e314447a-b8d1-465f-99fb-4f684b091913","Type":"ContainerDied","Data":"070b55dc5da2ef06f38428d8c537bfc0698cfc6a03214dbe31c4234144bf9dcf"} Jan 21 17:45:04 crc kubenswrapper[4799]: I0121 17:45:04.025425 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="070b55dc5da2ef06f38428d8c537bfc0698cfc6a03214dbe31c4234144bf9dcf" Jan 21 17:45:04 crc kubenswrapper[4799]: I0121 17:45:04.025432 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8" Jan 21 17:45:12 crc kubenswrapper[4799]: I0121 17:45:12.525717 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc"] Jan 21 17:45:12 crc kubenswrapper[4799]: E0121 17:45:12.527016 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e314447a-b8d1-465f-99fb-4f684b091913" containerName="collect-profiles" Jan 21 17:45:12 crc kubenswrapper[4799]: I0121 17:45:12.527064 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e314447a-b8d1-465f-99fb-4f684b091913" containerName="collect-profiles" Jan 21 17:45:12 crc kubenswrapper[4799]: I0121 17:45:12.527335 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e314447a-b8d1-465f-99fb-4f684b091913" containerName="collect-profiles" Jan 21 17:45:12 crc kubenswrapper[4799]: I0121 17:45:12.528690 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" Jan 21 17:45:12 crc kubenswrapper[4799]: I0121 17:45:12.538302 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc"] Jan 21 17:45:12 crc kubenswrapper[4799]: I0121 17:45:12.539483 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 21 17:45:12 crc kubenswrapper[4799]: I0121 17:45:12.610813 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fd2efc6b-139c-4450-8665-e5d4a013ed30-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc\" (UID: \"fd2efc6b-139c-4450-8665-e5d4a013ed30\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" Jan 21 17:45:12 crc kubenswrapper[4799]: I0121 17:45:12.610886 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fd2efc6b-139c-4450-8665-e5d4a013ed30-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc\" (UID: \"fd2efc6b-139c-4450-8665-e5d4a013ed30\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" Jan 21 17:45:12 crc kubenswrapper[4799]: I0121 17:45:12.610930 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdcnk\" (UniqueName: \"kubernetes.io/projected/fd2efc6b-139c-4450-8665-e5d4a013ed30-kube-api-access-sdcnk\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc\" (UID: \"fd2efc6b-139c-4450-8665-e5d4a013ed30\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" Jan 21 17:45:12 crc kubenswrapper[4799]: I0121 17:45:12.712475 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fd2efc6b-139c-4450-8665-e5d4a013ed30-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc\" (UID: \"fd2efc6b-139c-4450-8665-e5d4a013ed30\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" Jan 21 17:45:12 crc kubenswrapper[4799]: I0121 17:45:12.712537 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fd2efc6b-139c-4450-8665-e5d4a013ed30-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc\" (UID: \"fd2efc6b-139c-4450-8665-e5d4a013ed30\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" Jan 21 17:45:12 crc kubenswrapper[4799]: I0121 17:45:12.712561 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdcnk\" (UniqueName: \"kubernetes.io/projected/fd2efc6b-139c-4450-8665-e5d4a013ed30-kube-api-access-sdcnk\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc\" (UID: \"fd2efc6b-139c-4450-8665-e5d4a013ed30\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" Jan 21 17:45:12 crc kubenswrapper[4799]: I0121 17:45:12.713305 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/fd2efc6b-139c-4450-8665-e5d4a013ed30-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc\" (UID: \"fd2efc6b-139c-4450-8665-e5d4a013ed30\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" Jan 21 17:45:12 crc kubenswrapper[4799]: I0121 17:45:12.713376 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fd2efc6b-139c-4450-8665-e5d4a013ed30-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc\" (UID: \"fd2efc6b-139c-4450-8665-e5d4a013ed30\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" Jan 21 17:45:12 crc kubenswrapper[4799]: I0121 17:45:12.736242 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdcnk\" (UniqueName: \"kubernetes.io/projected/fd2efc6b-139c-4450-8665-e5d4a013ed30-kube-api-access-sdcnk\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc\" (UID: \"fd2efc6b-139c-4450-8665-e5d4a013ed30\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" Jan 21 17:45:12 crc kubenswrapper[4799]: I0121 17:45:12.849596 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" Jan 21 17:45:13 crc kubenswrapper[4799]: I0121 17:45:13.184708 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc"] Jan 21 17:45:14 crc kubenswrapper[4799]: I0121 17:45:14.172445 4799 generic.go:334] "Generic (PLEG): container finished" podID="fd2efc6b-139c-4450-8665-e5d4a013ed30" containerID="ed3fe9ddd74aa9e386327d8fdd665110ad1ad27df6597f4d46e9c319a5f4a94d" exitCode=0 Jan 21 17:45:14 crc kubenswrapper[4799]: I0121 17:45:14.172574 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" event={"ID":"fd2efc6b-139c-4450-8665-e5d4a013ed30","Type":"ContainerDied","Data":"ed3fe9ddd74aa9e386327d8fdd665110ad1ad27df6597f4d46e9c319a5f4a94d"} Jan 21 17:45:14 crc kubenswrapper[4799]: I0121 17:45:14.172839 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" event={"ID":"fd2efc6b-139c-4450-8665-e5d4a013ed30","Type":"ContainerStarted","Data":"6812b7f2d60054d6d25da7c6601960c8bf716fa7469f6d25eff1b16190a50a9e"} Jan 21 17:45:16 crc kubenswrapper[4799]: I0121 17:45:16.188586 4799 generic.go:334] "Generic (PLEG): container finished" podID="fd2efc6b-139c-4450-8665-e5d4a013ed30" containerID="91f681ee16522c8bd9bd0cedfdfe58d64875dcf8c14b44137a8aacfe3e0d9523" exitCode=0 Jan 21 17:45:16 crc kubenswrapper[4799]: I0121 17:45:16.188661 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" event={"ID":"fd2efc6b-139c-4450-8665-e5d4a013ed30","Type":"ContainerDied","Data":"91f681ee16522c8bd9bd0cedfdfe58d64875dcf8c14b44137a8aacfe3e0d9523"} Jan 21 17:45:17 crc kubenswrapper[4799]: I0121 17:45:17.196235 4799 generic.go:334] "Generic (PLEG): container finished" podID="fd2efc6b-139c-4450-8665-e5d4a013ed30" containerID="09bcd89840c9e39ce0101b235e6eba8583fb40a98da4cbb7175d651b3e53b019" exitCode=0 Jan 21 17:45:17 crc kubenswrapper[4799]: I0121 
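17:45:17.196288 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" event={"ID":"fd2efc6b-139c-4450-8665-e5d4a013ed30","Type":"ContainerDied","Data":"09bcd89840c9e39ce0101b235e6eba8583fb40a98da4cbb7175d651b3e53b019"}

This is the last of three "container finished ... exitCode=0" events for the bundle-unpack pod: OLM runs its util, pull, and extract containers to completion one after another (the names show up in the RemoveStaleState entries further down), and each exit surfaces in the SyncLoop as a PLEG event like the one just above. A simplified stand-in for kubelet's pleg.PodLifecycleEvent that decodes such a payload; the field meanings are read off the log, while the type name and JSON handling are illustrative only:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// podLifecycleEvent mirrors the event={"ID":...,"Type":...,"Data":...} shape
// in these entries: ID is the pod UID, Type the lifecycle transition, and
// Data the container (or sandbox) ID involved.
type podLifecycleEvent struct {
	ID   string
	Type string
	Data string
}

func main() {
	raw := `{"ID":"fd2efc6b-139c-4450-8665-e5d4a013ed30","Type":"ContainerDied","Data":"09bcd89840c9e39ce0101b235e6eba8583fb40a98da4cbb7175d651b3e53b019"}`
	var ev podLifecycleEvent
	if err := json.Unmarshal([]byte(raw), &ev); err != nil {
		panic(err)
	}
	fmt.Printf("pod %s: %s (container %.12s)\n", ev.ID, ev.Type, ev.Data)
}
```
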
Jan 21 17:45:18 crc kubenswrapper[4799]: I0121 17:45:18.499012 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc"
Jan 21 17:45:18 crc kubenswrapper[4799]: I0121 17:45:18.639781 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdcnk\" (UniqueName: \"kubernetes.io/projected/fd2efc6b-139c-4450-8665-e5d4a013ed30-kube-api-access-sdcnk\") pod \"fd2efc6b-139c-4450-8665-e5d4a013ed30\" (UID: \"fd2efc6b-139c-4450-8665-e5d4a013ed30\") "
Jan 21 17:45:18 crc kubenswrapper[4799]: I0121 17:45:18.639982 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fd2efc6b-139c-4450-8665-e5d4a013ed30-util\") pod \"fd2efc6b-139c-4450-8665-e5d4a013ed30\" (UID: \"fd2efc6b-139c-4450-8665-e5d4a013ed30\") "
Jan 21 17:45:18 crc kubenswrapper[4799]: I0121 17:45:18.640148 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fd2efc6b-139c-4450-8665-e5d4a013ed30-bundle\") pod \"fd2efc6b-139c-4450-8665-e5d4a013ed30\" (UID: \"fd2efc6b-139c-4450-8665-e5d4a013ed30\") "
Jan 21 17:45:18 crc kubenswrapper[4799]: I0121 17:45:18.641023 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd2efc6b-139c-4450-8665-e5d4a013ed30-bundle" (OuterVolumeSpecName: "bundle") pod "fd2efc6b-139c-4450-8665-e5d4a013ed30" (UID: "fd2efc6b-139c-4450-8665-e5d4a013ed30"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 17:45:18 crc kubenswrapper[4799]: I0121 17:45:18.650723 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd2efc6b-139c-4450-8665-e5d4a013ed30-kube-api-access-sdcnk" (OuterVolumeSpecName: "kube-api-access-sdcnk") pod "fd2efc6b-139c-4450-8665-e5d4a013ed30" (UID: "fd2efc6b-139c-4450-8665-e5d4a013ed30"). InnerVolumeSpecName "kube-api-access-sdcnk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:45:18 crc kubenswrapper[4799]: I0121 17:45:18.660770 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd2efc6b-139c-4450-8665-e5d4a013ed30-util" (OuterVolumeSpecName: "util") pod "fd2efc6b-139c-4450-8665-e5d4a013ed30" (UID: "fd2efc6b-139c-4450-8665-e5d4a013ed30"). InnerVolumeSpecName "util".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:45:18 crc kubenswrapper[4799]: I0121 17:45:18.742452 4799 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fd2efc6b-139c-4450-8665-e5d4a013ed30-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:45:18 crc kubenswrapper[4799]: I0121 17:45:18.742852 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdcnk\" (UniqueName: \"kubernetes.io/projected/fd2efc6b-139c-4450-8665-e5d4a013ed30-kube-api-access-sdcnk\") on node \"crc\" DevicePath \"\"" Jan 21 17:45:18 crc kubenswrapper[4799]: I0121 17:45:18.743055 4799 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fd2efc6b-139c-4450-8665-e5d4a013ed30-util\") on node \"crc\" DevicePath \"\"" Jan 21 17:45:19 crc kubenswrapper[4799]: I0121 17:45:19.228222 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" event={"ID":"fd2efc6b-139c-4450-8665-e5d4a013ed30","Type":"ContainerDied","Data":"6812b7f2d60054d6d25da7c6601960c8bf716fa7469f6d25eff1b16190a50a9e"} Jan 21 17:45:19 crc kubenswrapper[4799]: I0121 17:45:19.228975 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6812b7f2d60054d6d25da7c6601960c8bf716fa7469f6d25eff1b16190a50a9e" Jan 21 17:45:19 crc kubenswrapper[4799]: I0121 17:45:19.228292 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc" Jan 21 17:45:24 crc kubenswrapper[4799]: I0121 17:45:24.147024 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-njm8b"] Jan 21 17:45:24 crc kubenswrapper[4799]: E0121 17:45:24.147353 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd2efc6b-139c-4450-8665-e5d4a013ed30" containerName="util" Jan 21 17:45:24 crc kubenswrapper[4799]: I0121 17:45:24.147373 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd2efc6b-139c-4450-8665-e5d4a013ed30" containerName="util" Jan 21 17:45:24 crc kubenswrapper[4799]: E0121 17:45:24.147381 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd2efc6b-139c-4450-8665-e5d4a013ed30" containerName="pull" Jan 21 17:45:24 crc kubenswrapper[4799]: I0121 17:45:24.147386 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd2efc6b-139c-4450-8665-e5d4a013ed30" containerName="pull" Jan 21 17:45:24 crc kubenswrapper[4799]: E0121 17:45:24.147397 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd2efc6b-139c-4450-8665-e5d4a013ed30" containerName="extract" Jan 21 17:45:24 crc kubenswrapper[4799]: I0121 17:45:24.147403 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd2efc6b-139c-4450-8665-e5d4a013ed30" containerName="extract" Jan 21 17:45:24 crc kubenswrapper[4799]: I0121 17:45:24.147551 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd2efc6b-139c-4450-8665-e5d4a013ed30" containerName="extract" Jan 21 17:45:24 crc kubenswrapper[4799]: I0121 17:45:24.148092 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-njm8b" Jan 21 17:45:24 crc kubenswrapper[4799]: I0121 17:45:24.151087 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfcdk\" (UniqueName: \"kubernetes.io/projected/fd2c58b3-6fc3-4391-8397-c2b1078e48b8-kube-api-access-dfcdk\") pod \"nmstate-operator-646758c888-njm8b\" (UID: \"fd2c58b3-6fc3-4391-8397-c2b1078e48b8\") " pod="openshift-nmstate/nmstate-operator-646758c888-njm8b" Jan 21 17:45:24 crc kubenswrapper[4799]: I0121 17:45:24.152015 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Jan 21 17:45:24 crc kubenswrapper[4799]: I0121 17:45:24.152321 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Jan 21 17:45:24 crc kubenswrapper[4799]: I0121 17:45:24.153106 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-mb5q6" Jan 21 17:45:24 crc kubenswrapper[4799]: I0121 17:45:24.159825 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-njm8b"] Jan 21 17:45:24 crc kubenswrapper[4799]: I0121 17:45:24.252210 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfcdk\" (UniqueName: \"kubernetes.io/projected/fd2c58b3-6fc3-4391-8397-c2b1078e48b8-kube-api-access-dfcdk\") pod \"nmstate-operator-646758c888-njm8b\" (UID: \"fd2c58b3-6fc3-4391-8397-c2b1078e48b8\") " pod="openshift-nmstate/nmstate-operator-646758c888-njm8b" Jan 21 17:45:24 crc kubenswrapper[4799]: I0121 17:45:24.365326 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfcdk\" (UniqueName: \"kubernetes.io/projected/fd2c58b3-6fc3-4391-8397-c2b1078e48b8-kube-api-access-dfcdk\") pod \"nmstate-operator-646758c888-njm8b\" (UID: \"fd2c58b3-6fc3-4391-8397-c2b1078e48b8\") " pod="openshift-nmstate/nmstate-operator-646758c888-njm8b" Jan 21 17:45:24 crc kubenswrapper[4799]: I0121 17:45:24.468688 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-njm8b" Jan 21 17:45:24 crc kubenswrapper[4799]: I0121 17:45:24.987728 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-njm8b"] Jan 21 17:45:25 crc kubenswrapper[4799]: I0121 17:45:25.381867 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-njm8b" event={"ID":"fd2c58b3-6fc3-4391-8397-c2b1078e48b8","Type":"ContainerStarted","Data":"650e8f5e9410672e14272addd9818327c010caf418f5fe907dfccbea16e254d5"} Jan 21 17:45:28 crc kubenswrapper[4799]: I0121 17:45:28.404357 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-njm8b" event={"ID":"fd2c58b3-6fc3-4391-8397-c2b1078e48b8","Type":"ContainerStarted","Data":"a4cb6e608e9bcd21f1e20b4c27f5ed2e8c37efa757eedd5789a75f36f06dd85e"} Jan 21 17:45:28 crc kubenswrapper[4799]: I0121 17:45:28.428147 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-njm8b" podStartSLOduration=2.027222848 podStartE2EDuration="4.428075403s" podCreationTimestamp="2026-01-21 17:45:24 +0000 UTC" firstStartedPulling="2026-01-21 17:45:25.004891356 +0000 UTC m=+751.631181379" lastFinishedPulling="2026-01-21 17:45:27.405743911 +0000 UTC m=+754.032033934" observedRunningTime="2026-01-21 17:45:28.424122401 +0000 UTC m=+755.050412424" watchObservedRunningTime="2026-01-21 17:45:28.428075403 +0000 UTC m=+755.054365426" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.751402 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-qcsdq"] Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.752868 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-qcsdq" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.754988 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-c6kmg" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.762197 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27"] Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.763625 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.765306 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.776249 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-qcsdq"] Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.784591 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27"] Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.795720 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-xcfs7"] Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.800327 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-xcfs7" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.838079 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f1d83e65-f17f-4802-a0be-536f18cfe6e2-ovs-socket\") pod \"nmstate-handler-xcfs7\" (UID: \"f1d83e65-f17f-4802-a0be-536f18cfe6e2\") " pod="openshift-nmstate/nmstate-handler-xcfs7" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.838169 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nn4j\" (UniqueName: \"kubernetes.io/projected/f1d83e65-f17f-4802-a0be-536f18cfe6e2-kube-api-access-6nn4j\") pod \"nmstate-handler-xcfs7\" (UID: \"f1d83e65-f17f-4802-a0be-536f18cfe6e2\") " pod="openshift-nmstate/nmstate-handler-xcfs7" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.838201 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/9445e980-390c-4759-9dcb-aa2a906f773a-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-xkl27\" (UID: \"9445e980-390c-4759-9dcb-aa2a906f773a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.838532 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f1d83e65-f17f-4802-a0be-536f18cfe6e2-nmstate-lock\") pod \"nmstate-handler-xcfs7\" (UID: \"f1d83e65-f17f-4802-a0be-536f18cfe6e2\") " pod="openshift-nmstate/nmstate-handler-xcfs7" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.838957 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qg42p\" (UniqueName: \"kubernetes.io/projected/eeedecf6-13c6-4102-a889-a3cec17f120c-kube-api-access-qg42p\") pod \"nmstate-metrics-54757c584b-qcsdq\" (UID: \"eeedecf6-13c6-4102-a889-a3cec17f120c\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-qcsdq" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.839004 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb2c9\" (UniqueName: \"kubernetes.io/projected/9445e980-390c-4759-9dcb-aa2a906f773a-kube-api-access-xb2c9\") pod \"nmstate-webhook-8474b5b9d8-xkl27\" (UID: \"9445e980-390c-4759-9dcb-aa2a906f773a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.839052 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f1d83e65-f17f-4802-a0be-536f18cfe6e2-dbus-socket\") pod \"nmstate-handler-xcfs7\" (UID: \"f1d83e65-f17f-4802-a0be-536f18cfe6e2\") " pod="openshift-nmstate/nmstate-handler-xcfs7" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.984903 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f1d83e65-f17f-4802-a0be-536f18cfe6e2-ovs-socket\") pod \"nmstate-handler-xcfs7\" (UID: \"f1d83e65-f17f-4802-a0be-536f18cfe6e2\") " pod="openshift-nmstate/nmstate-handler-xcfs7" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.985008 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nn4j\" (UniqueName: 
\"kubernetes.io/projected/f1d83e65-f17f-4802-a0be-536f18cfe6e2-kube-api-access-6nn4j\") pod \"nmstate-handler-xcfs7\" (UID: \"f1d83e65-f17f-4802-a0be-536f18cfe6e2\") " pod="openshift-nmstate/nmstate-handler-xcfs7" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.985074 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/9445e980-390c-4759-9dcb-aa2a906f773a-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-xkl27\" (UID: \"9445e980-390c-4759-9dcb-aa2a906f773a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.985171 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f1d83e65-f17f-4802-a0be-536f18cfe6e2-nmstate-lock\") pod \"nmstate-handler-xcfs7\" (UID: \"f1d83e65-f17f-4802-a0be-536f18cfe6e2\") " pod="openshift-nmstate/nmstate-handler-xcfs7" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.985430 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qg42p\" (UniqueName: \"kubernetes.io/projected/eeedecf6-13c6-4102-a889-a3cec17f120c-kube-api-access-qg42p\") pod \"nmstate-metrics-54757c584b-qcsdq\" (UID: \"eeedecf6-13c6-4102-a889-a3cec17f120c\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-qcsdq" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.985530 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb2c9\" (UniqueName: \"kubernetes.io/projected/9445e980-390c-4759-9dcb-aa2a906f773a-kube-api-access-xb2c9\") pod \"nmstate-webhook-8474b5b9d8-xkl27\" (UID: \"9445e980-390c-4759-9dcb-aa2a906f773a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27" Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.985632 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f1d83e65-f17f-4802-a0be-536f18cfe6e2-dbus-socket\") pod \"nmstate-handler-xcfs7\" (UID: \"f1d83e65-f17f-4802-a0be-536f18cfe6e2\") " pod="openshift-nmstate/nmstate-handler-xcfs7" Jan 21 17:45:33 crc kubenswrapper[4799]: E0121 17:45:33.985756 4799 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Jan 21 17:45:33 crc kubenswrapper[4799]: E0121 17:45:33.985891 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9445e980-390c-4759-9dcb-aa2a906f773a-tls-key-pair podName:9445e980-390c-4759-9dcb-aa2a906f773a nodeName:}" failed. No retries permitted until 2026-01-21 17:45:34.4858572 +0000 UTC m=+761.112147223 (durationBeforeRetry 500ms). 
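Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/9445e980-390c-4759-9dcb-aa2a906f773a-tls-key-pair") pod "nmstate-webhook-8474b5b9d8-xkl27" (UID: "9445e980-390c-4759-9dcb-aa2a906f773a") : secret "openshift-nmstate-webhook" not found

The mount fails only because the webhook's serving secret has not been created yet; the operation is parked in nestedpendingoperations with a 500ms durationBeforeRetry and retried with a growing delay until the secret exists (the same tls-key-pair volume mounts successfully at 17:45:34.501605, further down). The retry shape, sketched with the apimachinery wait helpers; the Factor and Steps values here are illustrative, not kubelet's actual constants:

```go
package main

import (
	"fmt"
	"time"

	"k8s.io/apimachinery/pkg/util/wait"
)

func main() {
	secretExists := false
	attempts := 0
	err := wait.ExponentialBackoff(wait.Backoff{
		Duration: 500 * time.Millisecond, // the logged durationBeforeRetry
		Factor:   2.0,                    // illustrative growth factor
		Steps:    8,
	}, func() (bool, error) {
		attempts++
		if attempts == 4 {
			secretExists = true // stand-in for the operator finally creating the secret
		}
		return secretExists, nil // false, nil => park the operation and retry later
	})
	fmt.Println("mounted after", attempts, "attempts, err:", err)
}
```
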
Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.986454 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f1d83e65-f17f-4802-a0be-536f18cfe6e2-nmstate-lock\") pod \"nmstate-handler-xcfs7\" (UID: \"f1d83e65-f17f-4802-a0be-536f18cfe6e2\") " pod="openshift-nmstate/nmstate-handler-xcfs7"
Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.986710 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f1d83e65-f17f-4802-a0be-536f18cfe6e2-ovs-socket\") pod \"nmstate-handler-xcfs7\" (UID: \"f1d83e65-f17f-4802-a0be-536f18cfe6e2\") " pod="openshift-nmstate/nmstate-handler-xcfs7"
Jan 21 17:45:33 crc kubenswrapper[4799]: I0121 17:45:33.987283 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f1d83e65-f17f-4802-a0be-536f18cfe6e2-dbus-socket\") pod \"nmstate-handler-xcfs7\" (UID: \"f1d83e65-f17f-4802-a0be-536f18cfe6e2\") " pod="openshift-nmstate/nmstate-handler-xcfs7"
Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.013590 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nn4j\" (UniqueName: \"kubernetes.io/projected/f1d83e65-f17f-4802-a0be-536f18cfe6e2-kube-api-access-6nn4j\") pod \"nmstate-handler-xcfs7\" (UID: \"f1d83e65-f17f-4802-a0be-536f18cfe6e2\") " pod="openshift-nmstate/nmstate-handler-xcfs7"
Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.020934 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb2c9\" (UniqueName: \"kubernetes.io/projected/9445e980-390c-4759-9dcb-aa2a906f773a-kube-api-access-xb2c9\") pod \"nmstate-webhook-8474b5b9d8-xkl27\" (UID: \"9445e980-390c-4759-9dcb-aa2a906f773a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27"
Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.036607 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qg42p\" (UniqueName: \"kubernetes.io/projected/eeedecf6-13c6-4102-a889-a3cec17f120c-kube-api-access-qg42p\") pod \"nmstate-metrics-54757c584b-qcsdq\" (UID: \"eeedecf6-13c6-4102-a889-a3cec17f120c\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-qcsdq"
Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.087227 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-qcsdq"
Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.102602 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f"]
Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.103767 4799 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.107369 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.107637 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.107920 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-r4r9k" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.130719 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f"] Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.139409 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-xcfs7" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.292043 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spjdf\" (UniqueName: \"kubernetes.io/projected/777bc3b4-a1aa-42ec-8639-f08d14be32b4-kube-api-access-spjdf\") pod \"nmstate-console-plugin-7754f76f8b-84q5f\" (UID: \"777bc3b4-a1aa-42ec-8639-f08d14be32b4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.292508 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/777bc3b4-a1aa-42ec-8639-f08d14be32b4-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-84q5f\" (UID: \"777bc3b4-a1aa-42ec-8639-f08d14be32b4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.292547 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/777bc3b4-a1aa-42ec-8639-f08d14be32b4-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-84q5f\" (UID: \"777bc3b4-a1aa-42ec-8639-f08d14be32b4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.292699 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7589c5ddb-vrb7c"] Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.293648 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.322074 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7589c5ddb-vrb7c"] Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.393872 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/89f8350f-4f54-4e22-abd7-f622b8ebdf71-oauth-serving-cert\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.393959 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/89f8350f-4f54-4e22-abd7-f622b8ebdf71-service-ca\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.393994 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spjdf\" (UniqueName: \"kubernetes.io/projected/777bc3b4-a1aa-42ec-8639-f08d14be32b4-kube-api-access-spjdf\") pod \"nmstate-console-plugin-7754f76f8b-84q5f\" (UID: \"777bc3b4-a1aa-42ec-8639-f08d14be32b4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.394024 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/777bc3b4-a1aa-42ec-8639-f08d14be32b4-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-84q5f\" (UID: \"777bc3b4-a1aa-42ec-8639-f08d14be32b4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.394060 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89f8350f-4f54-4e22-abd7-f622b8ebdf71-trusted-ca-bundle\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.394103 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/89f8350f-4f54-4e22-abd7-f622b8ebdf71-console-serving-cert\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.394132 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjhfv\" (UniqueName: \"kubernetes.io/projected/89f8350f-4f54-4e22-abd7-f622b8ebdf71-kube-api-access-bjhfv\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.394177 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/777bc3b4-a1aa-42ec-8639-f08d14be32b4-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-84q5f\" (UID: \"777bc3b4-a1aa-42ec-8639-f08d14be32b4\") " 
pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.394203 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/89f8350f-4f54-4e22-abd7-f622b8ebdf71-console-config\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.394248 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/89f8350f-4f54-4e22-abd7-f622b8ebdf71-console-oauth-config\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.398260 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/777bc3b4-a1aa-42ec-8639-f08d14be32b4-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-84q5f\" (UID: \"777bc3b4-a1aa-42ec-8639-f08d14be32b4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.400036 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/777bc3b4-a1aa-42ec-8639-f08d14be32b4-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-84q5f\" (UID: \"777bc3b4-a1aa-42ec-8639-f08d14be32b4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.400068 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-qcsdq"] Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.418496 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spjdf\" (UniqueName: \"kubernetes.io/projected/777bc3b4-a1aa-42ec-8639-f08d14be32b4-kube-api-access-spjdf\") pod \"nmstate-console-plugin-7754f76f8b-84q5f\" (UID: \"777bc3b4-a1aa-42ec-8639-f08d14be32b4\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.451873 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-xcfs7" event={"ID":"f1d83e65-f17f-4802-a0be-536f18cfe6e2","Type":"ContainerStarted","Data":"56c2eb793410a0c3c45152ed9e6a141f3088d955fd156b8479a09b4a6cbc1aeb"} Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.453734 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-qcsdq" event={"ID":"eeedecf6-13c6-4102-a889-a3cec17f120c","Type":"ContainerStarted","Data":"220daa3051d8ed9f6ff332c560041eaa4aac32bb1d178eaa1d089ddb91c2a282"} Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.462524 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.495922 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89f8350f-4f54-4e22-abd7-f622b8ebdf71-trusted-ca-bundle\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.496016 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/89f8350f-4f54-4e22-abd7-f622b8ebdf71-console-serving-cert\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.496056 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjhfv\" (UniqueName: \"kubernetes.io/projected/89f8350f-4f54-4e22-abd7-f622b8ebdf71-kube-api-access-bjhfv\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.496092 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/89f8350f-4f54-4e22-abd7-f622b8ebdf71-console-config\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.496176 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/89f8350f-4f54-4e22-abd7-f622b8ebdf71-console-oauth-config\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.496276 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/9445e980-390c-4759-9dcb-aa2a906f773a-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-xkl27\" (UID: \"9445e980-390c-4759-9dcb-aa2a906f773a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.496323 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/89f8350f-4f54-4e22-abd7-f622b8ebdf71-oauth-serving-cert\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.496377 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/89f8350f-4f54-4e22-abd7-f622b8ebdf71-service-ca\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.497703 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/89f8350f-4f54-4e22-abd7-f622b8ebdf71-service-ca\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") 
" pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.498605 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/89f8350f-4f54-4e22-abd7-f622b8ebdf71-console-config\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.498623 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/89f8350f-4f54-4e22-abd7-f622b8ebdf71-oauth-serving-cert\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.498708 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89f8350f-4f54-4e22-abd7-f622b8ebdf71-trusted-ca-bundle\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.501064 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/89f8350f-4f54-4e22-abd7-f622b8ebdf71-console-oauth-config\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.501605 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/9445e980-390c-4759-9dcb-aa2a906f773a-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-xkl27\" (UID: \"9445e980-390c-4759-9dcb-aa2a906f773a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.502794 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/89f8350f-4f54-4e22-abd7-f622b8ebdf71-console-serving-cert\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.517156 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjhfv\" (UniqueName: \"kubernetes.io/projected/89f8350f-4f54-4e22-abd7-f622b8ebdf71-kube-api-access-bjhfv\") pod \"console-7589c5ddb-vrb7c\" (UID: \"89f8350f-4f54-4e22-abd7-f622b8ebdf71\") " pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.622841 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:34 crc kubenswrapper[4799]: I0121 17:45:34.694682 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27" Jan 21 17:45:35 crc kubenswrapper[4799]: I0121 17:45:35.055246 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f"] Jan 21 17:45:35 crc kubenswrapper[4799]: I0121 17:45:35.148903 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27"] Jan 21 17:45:35 crc kubenswrapper[4799]: I0121 17:45:35.188449 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7589c5ddb-vrb7c"] Jan 21 17:45:35 crc kubenswrapper[4799]: I0121 17:45:35.462641 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f" event={"ID":"777bc3b4-a1aa-42ec-8639-f08d14be32b4","Type":"ContainerStarted","Data":"b1cdad0321aeb164d28fd61dc4441ce08056bc09dac137b5a0aa723dd36656db"} Jan 21 17:45:35 crc kubenswrapper[4799]: I0121 17:45:35.465375 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7589c5ddb-vrb7c" event={"ID":"89f8350f-4f54-4e22-abd7-f622b8ebdf71","Type":"ContainerStarted","Data":"83bb5bcd27ae75c8940c6e04ff9a256661ad5c0fe59ca7d8f64101f64a446189"} Jan 21 17:45:35 crc kubenswrapper[4799]: I0121 17:45:35.465406 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7589c5ddb-vrb7c" event={"ID":"89f8350f-4f54-4e22-abd7-f622b8ebdf71","Type":"ContainerStarted","Data":"691fa50b60756ff7392a5c50871761530fd8d6203ef122641726471ec093a61c"} Jan 21 17:45:35 crc kubenswrapper[4799]: I0121 17:45:35.469952 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27" event={"ID":"9445e980-390c-4759-9dcb-aa2a906f773a","Type":"ContainerStarted","Data":"46241f8996ef8e66bdf744aca7482b64a25ea3db12d3712d51bb9e1c638240fd"} Jan 21 17:45:35 crc kubenswrapper[4799]: I0121 17:45:35.494129 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7589c5ddb-vrb7c" podStartSLOduration=1.494098653 podStartE2EDuration="1.494098653s" podCreationTimestamp="2026-01-21 17:45:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:45:35.490239544 +0000 UTC m=+762.116529587" watchObservedRunningTime="2026-01-21 17:45:35.494098653 +0000 UTC m=+762.120388676" Jan 21 17:45:38 crc kubenswrapper[4799]: I0121 17:45:38.501440 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-xcfs7" event={"ID":"f1d83e65-f17f-4802-a0be-536f18cfe6e2","Type":"ContainerStarted","Data":"66d2cfa475956e2ed67b7605ff143454b5e1bea33503cee69674d09f761acb6c"} Jan 21 17:45:38 crc kubenswrapper[4799]: I0121 17:45:38.502678 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-xcfs7" Jan 21 17:45:38 crc kubenswrapper[4799]: I0121 17:45:38.507704 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27" event={"ID":"9445e980-390c-4759-9dcb-aa2a906f773a","Type":"ContainerStarted","Data":"1fc1207c0c686bd0b65f5e8d6cc46252d60fa72e5ae73413a0e53d17d1bde16b"} Jan 21 17:45:38 crc kubenswrapper[4799]: I0121 17:45:38.507854 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27" Jan 21 17:45:38 crc kubenswrapper[4799]: I0121 17:45:38.510983 4799 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-qcsdq" event={"ID":"eeedecf6-13c6-4102-a889-a3cec17f120c","Type":"ContainerStarted","Data":"5972eccf642a866400394a8cda2ba59c7b455459131509e0e9b59946d45cbec1"} Jan 21 17:45:38 crc kubenswrapper[4799]: I0121 17:45:38.533695 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-xcfs7" podStartSLOduration=2.374042528 podStartE2EDuration="5.533673876s" podCreationTimestamp="2026-01-21 17:45:33 +0000 UTC" firstStartedPulling="2026-01-21 17:45:34.169080572 +0000 UTC m=+760.795370595" lastFinishedPulling="2026-01-21 17:45:37.32871192 +0000 UTC m=+763.955001943" observedRunningTime="2026-01-21 17:45:38.530604539 +0000 UTC m=+765.156894572" watchObservedRunningTime="2026-01-21 17:45:38.533673876 +0000 UTC m=+765.159963899" Jan 21 17:45:38 crc kubenswrapper[4799]: I0121 17:45:38.555398 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27" podStartSLOduration=3.307207863 podStartE2EDuration="5.555368693s" podCreationTimestamp="2026-01-21 17:45:33 +0000 UTC" firstStartedPulling="2026-01-21 17:45:35.150260263 +0000 UTC m=+761.776550286" lastFinishedPulling="2026-01-21 17:45:37.398421093 +0000 UTC m=+764.024711116" observedRunningTime="2026-01-21 17:45:38.548595891 +0000 UTC m=+765.174885914" watchObservedRunningTime="2026-01-21 17:45:38.555368693 +0000 UTC m=+765.181658716" Jan 21 17:45:39 crc kubenswrapper[4799]: I0121 17:45:39.521257 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f" event={"ID":"777bc3b4-a1aa-42ec-8639-f08d14be32b4","Type":"ContainerStarted","Data":"09e7832e3a87674eb1234656c206f29dfe7216f1a35c8a2890822a31282920a2"} Jan 21 17:45:39 crc kubenswrapper[4799]: I0121 17:45:39.559956 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-84q5f" podStartSLOduration=2.208561657 podStartE2EDuration="5.559925809s" podCreationTimestamp="2026-01-21 17:45:34 +0000 UTC" firstStartedPulling="2026-01-21 17:45:35.07985179 +0000 UTC m=+761.706141823" lastFinishedPulling="2026-01-21 17:45:38.431215952 +0000 UTC m=+765.057505975" observedRunningTime="2026-01-21 17:45:39.538784778 +0000 UTC m=+766.165074811" watchObservedRunningTime="2026-01-21 17:45:39.559925809 +0000 UTC m=+766.186215832" Jan 21 17:45:40 crc kubenswrapper[4799]: I0121 17:45:40.530872 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-qcsdq" event={"ID":"eeedecf6-13c6-4102-a889-a3cec17f120c","Type":"ContainerStarted","Data":"2ce6f49ac1ea7cbe8382573e4a68fbd0af0e44f6e6a5f84869bf6f46eb04d7c9"} Jan 21 17:45:40 crc kubenswrapper[4799]: I0121 17:45:40.553894 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-qcsdq" podStartSLOduration=1.800243446 podStartE2EDuration="7.553870393s" podCreationTimestamp="2026-01-21 17:45:33 +0000 UTC" firstStartedPulling="2026-01-21 17:45:34.41225956 +0000 UTC m=+761.038549583" lastFinishedPulling="2026-01-21 17:45:40.165886507 +0000 UTC m=+766.792176530" observedRunningTime="2026-01-21 17:45:40.550286161 +0000 UTC m=+767.176576204" watchObservedRunningTime="2026-01-21 17:45:40.553870393 +0000 UTC m=+767.180160416" Jan 21 17:45:44 crc kubenswrapper[4799]: I0121 17:45:44.169119 4799 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-xcfs7" Jan 21 17:45:44 crc kubenswrapper[4799]: I0121 17:45:44.623762 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:44 crc kubenswrapper[4799]: I0121 17:45:44.623842 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:44 crc kubenswrapper[4799]: I0121 17:45:44.629459 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:45 crc kubenswrapper[4799]: I0121 17:45:45.676752 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7589c5ddb-vrb7c" Jan 21 17:45:45 crc kubenswrapper[4799]: I0121 17:45:45.729225 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-m875t"] Jan 21 17:45:54 crc kubenswrapper[4799]: I0121 17:45:54.700500 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-xkl27" Jan 21 17:45:56 crc kubenswrapper[4799]: I0121 17:45:56.900112 4799 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 21 17:46:09 crc kubenswrapper[4799]: I0121 17:46:09.505734 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf"] Jan 21 17:46:09 crc kubenswrapper[4799]: I0121 17:46:09.508536 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" Jan 21 17:46:09 crc kubenswrapper[4799]: I0121 17:46:09.511388 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 21 17:46:09 crc kubenswrapper[4799]: I0121 17:46:09.520019 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf"] Jan 21 17:46:09 crc kubenswrapper[4799]: I0121 17:46:09.535436 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfb67070-f383-42b8-bb55-1406f6994a95-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf\" (UID: \"dfb67070-f383-42b8-bb55-1406f6994a95\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" Jan 21 17:46:09 crc kubenswrapper[4799]: I0121 17:46:09.535530 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ggqv\" (UniqueName: \"kubernetes.io/projected/dfb67070-f383-42b8-bb55-1406f6994a95-kube-api-access-2ggqv\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf\" (UID: \"dfb67070-f383-42b8-bb55-1406f6994a95\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" Jan 21 17:46:09 crc kubenswrapper[4799]: I0121 17:46:09.535664 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfb67070-f383-42b8-bb55-1406f6994a95-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf\" (UID: \"dfb67070-f383-42b8-bb55-1406f6994a95\") 
" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" Jan 21 17:46:09 crc kubenswrapper[4799]: I0121 17:46:09.637807 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfb67070-f383-42b8-bb55-1406f6994a95-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf\" (UID: \"dfb67070-f383-42b8-bb55-1406f6994a95\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" Jan 21 17:46:09 crc kubenswrapper[4799]: I0121 17:46:09.637902 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ggqv\" (UniqueName: \"kubernetes.io/projected/dfb67070-f383-42b8-bb55-1406f6994a95-kube-api-access-2ggqv\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf\" (UID: \"dfb67070-f383-42b8-bb55-1406f6994a95\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" Jan 21 17:46:09 crc kubenswrapper[4799]: I0121 17:46:09.638013 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfb67070-f383-42b8-bb55-1406f6994a95-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf\" (UID: \"dfb67070-f383-42b8-bb55-1406f6994a95\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" Jan 21 17:46:09 crc kubenswrapper[4799]: I0121 17:46:09.638663 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfb67070-f383-42b8-bb55-1406f6994a95-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf\" (UID: \"dfb67070-f383-42b8-bb55-1406f6994a95\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" Jan 21 17:46:09 crc kubenswrapper[4799]: I0121 17:46:09.639087 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfb67070-f383-42b8-bb55-1406f6994a95-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf\" (UID: \"dfb67070-f383-42b8-bb55-1406f6994a95\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" Jan 21 17:46:09 crc kubenswrapper[4799]: I0121 17:46:09.666504 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ggqv\" (UniqueName: \"kubernetes.io/projected/dfb67070-f383-42b8-bb55-1406f6994a95-kube-api-access-2ggqv\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf\" (UID: \"dfb67070-f383-42b8-bb55-1406f6994a95\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" Jan 21 17:46:09 crc kubenswrapper[4799]: I0121 17:46:09.834140 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" Jan 21 17:46:10 crc kubenswrapper[4799]: I0121 17:46:10.299985 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf"] Jan 21 17:46:10 crc kubenswrapper[4799]: I0121 17:46:10.774093 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-m875t" podUID="46c59bb9-7544-496f-a38c-1054b3b95ae8" containerName="console" containerID="cri-o://e82eaa84473c7572181c58c26950bd989f5b9bc69c109efec3dbd087af1a2d79" gracePeriod=15 Jan 21 17:46:10 crc kubenswrapper[4799]: I0121 17:46:10.863631 4799 generic.go:334] "Generic (PLEG): container finished" podID="dfb67070-f383-42b8-bb55-1406f6994a95" containerID="b74b6ec50dafd1ca7fb19dfa979597ba1de041c857c66675993e973acb3e4b64" exitCode=0 Jan 21 17:46:10 crc kubenswrapper[4799]: I0121 17:46:10.863711 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" event={"ID":"dfb67070-f383-42b8-bb55-1406f6994a95","Type":"ContainerDied","Data":"b74b6ec50dafd1ca7fb19dfa979597ba1de041c857c66675993e973acb3e4b64"} Jan 21 17:46:10 crc kubenswrapper[4799]: I0121 17:46:10.863780 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" event={"ID":"dfb67070-f383-42b8-bb55-1406f6994a95","Type":"ContainerStarted","Data":"38b8accc52c6de264251509158903d81bd79b8472b5499c949809ec342a95dfe"} Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.344432 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-m875t_46c59bb9-7544-496f-a38c-1054b3b95ae8/console/0.log" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.345068 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.531462 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-oauth-serving-cert\") pod \"46c59bb9-7544-496f-a38c-1054b3b95ae8\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.531522 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-oauth-config\") pod \"46c59bb9-7544-496f-a38c-1054b3b95ae8\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.531568 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-serving-cert\") pod \"46c59bb9-7544-496f-a38c-1054b3b95ae8\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.531592 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-trusted-ca-bundle\") pod \"46c59bb9-7544-496f-a38c-1054b3b95ae8\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.531633 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sh9r6\" (UniqueName: \"kubernetes.io/projected/46c59bb9-7544-496f-a38c-1054b3b95ae8-kube-api-access-sh9r6\") pod \"46c59bb9-7544-496f-a38c-1054b3b95ae8\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.531714 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-service-ca\") pod \"46c59bb9-7544-496f-a38c-1054b3b95ae8\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.531832 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-config\") pod \"46c59bb9-7544-496f-a38c-1054b3b95ae8\" (UID: \"46c59bb9-7544-496f-a38c-1054b3b95ae8\") " Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.532762 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "46c59bb9-7544-496f-a38c-1054b3b95ae8" (UID: "46c59bb9-7544-496f-a38c-1054b3b95ae8"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.532891 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-config" (OuterVolumeSpecName: "console-config") pod "46c59bb9-7544-496f-a38c-1054b3b95ae8" (UID: "46c59bb9-7544-496f-a38c-1054b3b95ae8"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.533342 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-service-ca" (OuterVolumeSpecName: "service-ca") pod "46c59bb9-7544-496f-a38c-1054b3b95ae8" (UID: "46c59bb9-7544-496f-a38c-1054b3b95ae8"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.533683 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "46c59bb9-7544-496f-a38c-1054b3b95ae8" (UID: "46c59bb9-7544-496f-a38c-1054b3b95ae8"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.539374 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46c59bb9-7544-496f-a38c-1054b3b95ae8-kube-api-access-sh9r6" (OuterVolumeSpecName: "kube-api-access-sh9r6") pod "46c59bb9-7544-496f-a38c-1054b3b95ae8" (UID: "46c59bb9-7544-496f-a38c-1054b3b95ae8"). InnerVolumeSpecName "kube-api-access-sh9r6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.540516 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "46c59bb9-7544-496f-a38c-1054b3b95ae8" (UID: "46c59bb9-7544-496f-a38c-1054b3b95ae8"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.559058 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "46c59bb9-7544-496f-a38c-1054b3b95ae8" (UID: "46c59bb9-7544-496f-a38c-1054b3b95ae8"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.634268 4799 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-service-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.634352 4799 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.634400 4799 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.634422 4799 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.634436 4799 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/46c59bb9-7544-496f-a38c-1054b3b95ae8-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.634448 4799 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/46c59bb9-7544-496f-a38c-1054b3b95ae8-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.634459 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sh9r6\" (UniqueName: \"kubernetes.io/projected/46c59bb9-7544-496f-a38c-1054b3b95ae8-kube-api-access-sh9r6\") on node \"crc\" DevicePath \"\"" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.806863 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ldh6q"] Jan 21 17:46:11 crc kubenswrapper[4799]: E0121 17:46:11.807263 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46c59bb9-7544-496f-a38c-1054b3b95ae8" containerName="console" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.807290 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="46c59bb9-7544-496f-a38c-1054b3b95ae8" containerName="console" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.807431 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="46c59bb9-7544-496f-a38c-1054b3b95ae8" containerName="console" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.808628 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.825861 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ldh6q"] Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.879311 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-m875t_46c59bb9-7544-496f-a38c-1054b3b95ae8/console/0.log" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.879395 4799 generic.go:334] "Generic (PLEG): container finished" podID="46c59bb9-7544-496f-a38c-1054b3b95ae8" containerID="e82eaa84473c7572181c58c26950bd989f5b9bc69c109efec3dbd087af1a2d79" exitCode=2 Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.879462 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-m875t" event={"ID":"46c59bb9-7544-496f-a38c-1054b3b95ae8","Type":"ContainerDied","Data":"e82eaa84473c7572181c58c26950bd989f5b9bc69c109efec3dbd087af1a2d79"} Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.879507 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-m875t" event={"ID":"46c59bb9-7544-496f-a38c-1054b3b95ae8","Type":"ContainerDied","Data":"22a071ac9915e4283b81d441ac7a164a56ac8dacc8418c9fbcc85a06c6be238e"} Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.879530 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-m875t" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.879565 4799 scope.go:117] "RemoveContainer" containerID="e82eaa84473c7572181c58c26950bd989f5b9bc69c109efec3dbd087af1a2d79" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.915000 4799 scope.go:117] "RemoveContainer" containerID="e82eaa84473c7572181c58c26950bd989f5b9bc69c109efec3dbd087af1a2d79" Jan 21 17:46:11 crc kubenswrapper[4799]: E0121 17:46:11.915952 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e82eaa84473c7572181c58c26950bd989f5b9bc69c109efec3dbd087af1a2d79\": container with ID starting with e82eaa84473c7572181c58c26950bd989f5b9bc69c109efec3dbd087af1a2d79 not found: ID does not exist" containerID="e82eaa84473c7572181c58c26950bd989f5b9bc69c109efec3dbd087af1a2d79" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.916002 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e82eaa84473c7572181c58c26950bd989f5b9bc69c109efec3dbd087af1a2d79"} err="failed to get container status \"e82eaa84473c7572181c58c26950bd989f5b9bc69c109efec3dbd087af1a2d79\": rpc error: code = NotFound desc = could not find container \"e82eaa84473c7572181c58c26950bd989f5b9bc69c109efec3dbd087af1a2d79\": container with ID starting with e82eaa84473c7572181c58c26950bd989f5b9bc69c109efec3dbd087af1a2d79 not found: ID does not exist" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.917009 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-m875t"] Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.923679 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-m875t"] Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.938457 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/32ea8c7f-d4bf-4003-a774-aa2380091efd-catalog-content\") pod \"redhat-operators-ldh6q\" (UID: \"32ea8c7f-d4bf-4003-a774-aa2380091efd\") " pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.938517 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32ea8c7f-d4bf-4003-a774-aa2380091efd-utilities\") pod \"redhat-operators-ldh6q\" (UID: \"32ea8c7f-d4bf-4003-a774-aa2380091efd\") " pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 17:46:11 crc kubenswrapper[4799]: I0121 17:46:11.938981 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sb4fm\" (UniqueName: \"kubernetes.io/projected/32ea8c7f-d4bf-4003-a774-aa2380091efd-kube-api-access-sb4fm\") pod \"redhat-operators-ldh6q\" (UID: \"32ea8c7f-d4bf-4003-a774-aa2380091efd\") " pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 17:46:12 crc kubenswrapper[4799]: I0121 17:46:12.046206 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sb4fm\" (UniqueName: \"kubernetes.io/projected/32ea8c7f-d4bf-4003-a774-aa2380091efd-kube-api-access-sb4fm\") pod \"redhat-operators-ldh6q\" (UID: \"32ea8c7f-d4bf-4003-a774-aa2380091efd\") " pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 17:46:12 crc kubenswrapper[4799]: I0121 17:46:12.046308 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32ea8c7f-d4bf-4003-a774-aa2380091efd-catalog-content\") pod \"redhat-operators-ldh6q\" (UID: \"32ea8c7f-d4bf-4003-a774-aa2380091efd\") " pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 17:46:12 crc kubenswrapper[4799]: I0121 17:46:12.046333 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32ea8c7f-d4bf-4003-a774-aa2380091efd-utilities\") pod \"redhat-operators-ldh6q\" (UID: \"32ea8c7f-d4bf-4003-a774-aa2380091efd\") " pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 17:46:12 crc kubenswrapper[4799]: I0121 17:46:12.046972 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32ea8c7f-d4bf-4003-a774-aa2380091efd-utilities\") pod \"redhat-operators-ldh6q\" (UID: \"32ea8c7f-d4bf-4003-a774-aa2380091efd\") " pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 17:46:12 crc kubenswrapper[4799]: I0121 17:46:12.047300 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32ea8c7f-d4bf-4003-a774-aa2380091efd-catalog-content\") pod \"redhat-operators-ldh6q\" (UID: \"32ea8c7f-d4bf-4003-a774-aa2380091efd\") " pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 17:46:12 crc kubenswrapper[4799]: I0121 17:46:12.071099 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sb4fm\" (UniqueName: \"kubernetes.io/projected/32ea8c7f-d4bf-4003-a774-aa2380091efd-kube-api-access-sb4fm\") pod \"redhat-operators-ldh6q\" (UID: \"32ea8c7f-d4bf-4003-a774-aa2380091efd\") " pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 17:46:12 crc kubenswrapper[4799]: I0121 17:46:12.172745 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 17:46:12 crc kubenswrapper[4799]: I0121 17:46:12.213878 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46c59bb9-7544-496f-a38c-1054b3b95ae8" path="/var/lib/kubelet/pods/46c59bb9-7544-496f-a38c-1054b3b95ae8/volumes" Jan 21 17:46:12 crc kubenswrapper[4799]: I0121 17:46:12.709719 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ldh6q"] Jan 21 17:46:12 crc kubenswrapper[4799]: I0121 17:46:12.888060 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ldh6q" event={"ID":"32ea8c7f-d4bf-4003-a774-aa2380091efd","Type":"ContainerStarted","Data":"bbbc7cc0986ba4362dcf6bf792b24ff79cfa72e58b9c7c98f499072f3b5f5824"} Jan 21 17:46:13 crc kubenswrapper[4799]: I0121 17:46:13.901178 4799 generic.go:334] "Generic (PLEG): container finished" podID="32ea8c7f-d4bf-4003-a774-aa2380091efd" containerID="ae75d1d811cf1ae335f3f2e939f6a7f04f99f33ad08c273c7dcbc823fe2865fd" exitCode=0 Jan 21 17:46:13 crc kubenswrapper[4799]: I0121 17:46:13.901347 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ldh6q" event={"ID":"32ea8c7f-d4bf-4003-a774-aa2380091efd","Type":"ContainerDied","Data":"ae75d1d811cf1ae335f3f2e939f6a7f04f99f33ad08c273c7dcbc823fe2865fd"} Jan 21 17:46:13 crc kubenswrapper[4799]: I0121 17:46:13.905351 4799 generic.go:334] "Generic (PLEG): container finished" podID="dfb67070-f383-42b8-bb55-1406f6994a95" containerID="42ad53ce4e8207b0e192362e206b73bb44e0aa5286c5d5614b0118c35e7fe5f6" exitCode=0 Jan 21 17:46:13 crc kubenswrapper[4799]: I0121 17:46:13.905391 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" event={"ID":"dfb67070-f383-42b8-bb55-1406f6994a95","Type":"ContainerDied","Data":"42ad53ce4e8207b0e192362e206b73bb44e0aa5286c5d5614b0118c35e7fe5f6"} Jan 21 17:46:14 crc kubenswrapper[4799]: I0121 17:46:14.916878 4799 generic.go:334] "Generic (PLEG): container finished" podID="dfb67070-f383-42b8-bb55-1406f6994a95" containerID="5bb025af42a12b9c26921102fb2c9ff90b225bd73f333425ebbab7e912dbdaca" exitCode=0 Jan 21 17:46:14 crc kubenswrapper[4799]: I0121 17:46:14.916964 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" event={"ID":"dfb67070-f383-42b8-bb55-1406f6994a95","Type":"ContainerDied","Data":"5bb025af42a12b9c26921102fb2c9ff90b225bd73f333425ebbab7e912dbdaca"} Jan 21 17:46:15 crc kubenswrapper[4799]: I0121 17:46:15.927636 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ldh6q" event={"ID":"32ea8c7f-d4bf-4003-a774-aa2380091efd","Type":"ContainerStarted","Data":"d25b113a97b24c8823fc22133c514203f050577ae9940932a0fc05f6a0faae4a"} Jan 21 17:46:16 crc kubenswrapper[4799]: I0121 17:46:16.260747 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" Jan 21 17:46:16 crc kubenswrapper[4799]: I0121 17:46:16.374029 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfb67070-f383-42b8-bb55-1406f6994a95-bundle\") pod \"dfb67070-f383-42b8-bb55-1406f6994a95\" (UID: \"dfb67070-f383-42b8-bb55-1406f6994a95\") " Jan 21 17:46:16 crc kubenswrapper[4799]: I0121 17:46:16.374432 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ggqv\" (UniqueName: \"kubernetes.io/projected/dfb67070-f383-42b8-bb55-1406f6994a95-kube-api-access-2ggqv\") pod \"dfb67070-f383-42b8-bb55-1406f6994a95\" (UID: \"dfb67070-f383-42b8-bb55-1406f6994a95\") " Jan 21 17:46:16 crc kubenswrapper[4799]: I0121 17:46:16.374553 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfb67070-f383-42b8-bb55-1406f6994a95-util\") pod \"dfb67070-f383-42b8-bb55-1406f6994a95\" (UID: \"dfb67070-f383-42b8-bb55-1406f6994a95\") " Jan 21 17:46:16 crc kubenswrapper[4799]: I0121 17:46:16.376272 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfb67070-f383-42b8-bb55-1406f6994a95-bundle" (OuterVolumeSpecName: "bundle") pod "dfb67070-f383-42b8-bb55-1406f6994a95" (UID: "dfb67070-f383-42b8-bb55-1406f6994a95"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:46:16 crc kubenswrapper[4799]: I0121 17:46:16.384625 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfb67070-f383-42b8-bb55-1406f6994a95-util" (OuterVolumeSpecName: "util") pod "dfb67070-f383-42b8-bb55-1406f6994a95" (UID: "dfb67070-f383-42b8-bb55-1406f6994a95"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:46:16 crc kubenswrapper[4799]: I0121 17:46:16.395005 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfb67070-f383-42b8-bb55-1406f6994a95-kube-api-access-2ggqv" (OuterVolumeSpecName: "kube-api-access-2ggqv") pod "dfb67070-f383-42b8-bb55-1406f6994a95" (UID: "dfb67070-f383-42b8-bb55-1406f6994a95"). InnerVolumeSpecName "kube-api-access-2ggqv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:46:16 crc kubenswrapper[4799]: I0121 17:46:16.476358 4799 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfb67070-f383-42b8-bb55-1406f6994a95-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:46:16 crc kubenswrapper[4799]: I0121 17:46:16.476396 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ggqv\" (UniqueName: \"kubernetes.io/projected/dfb67070-f383-42b8-bb55-1406f6994a95-kube-api-access-2ggqv\") on node \"crc\" DevicePath \"\"" Jan 21 17:46:16 crc kubenswrapper[4799]: I0121 17:46:16.476412 4799 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfb67070-f383-42b8-bb55-1406f6994a95-util\") on node \"crc\" DevicePath \"\"" Jan 21 17:46:16 crc kubenswrapper[4799]: I0121 17:46:16.937718 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" event={"ID":"dfb67070-f383-42b8-bb55-1406f6994a95","Type":"ContainerDied","Data":"38b8accc52c6de264251509158903d81bd79b8472b5499c949809ec342a95dfe"} Jan 21 17:46:16 crc kubenswrapper[4799]: I0121 17:46:16.937774 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38b8accc52c6de264251509158903d81bd79b8472b5499c949809ec342a95dfe" Jan 21 17:46:16 crc kubenswrapper[4799]: I0121 17:46:16.937791 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf" Jan 21 17:46:16 crc kubenswrapper[4799]: I0121 17:46:16.942337 4799 generic.go:334] "Generic (PLEG): container finished" podID="32ea8c7f-d4bf-4003-a774-aa2380091efd" containerID="d25b113a97b24c8823fc22133c514203f050577ae9940932a0fc05f6a0faae4a" exitCode=0 Jan 21 17:46:16 crc kubenswrapper[4799]: I0121 17:46:16.942381 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ldh6q" event={"ID":"32ea8c7f-d4bf-4003-a774-aa2380091efd","Type":"ContainerDied","Data":"d25b113a97b24c8823fc22133c514203f050577ae9940932a0fc05f6a0faae4a"} Jan 21 17:46:17 crc kubenswrapper[4799]: I0121 17:46:17.952020 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ldh6q" event={"ID":"32ea8c7f-d4bf-4003-a774-aa2380091efd","Type":"ContainerStarted","Data":"85255dfbbc54db7cada1198cf8442fec018b648371040667fc137c65f2bbc355"} Jan 21 17:46:17 crc kubenswrapper[4799]: I0121 17:46:17.977432 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ldh6q" podStartSLOduration=3.547271704 podStartE2EDuration="6.977386011s" podCreationTimestamp="2026-01-21 17:46:11 +0000 UTC" firstStartedPulling="2026-01-21 17:46:13.903224201 +0000 UTC m=+800.529514244" lastFinishedPulling="2026-01-21 17:46:17.333338518 +0000 UTC m=+803.959628551" observedRunningTime="2026-01-21 17:46:17.972787461 +0000 UTC m=+804.599077494" watchObservedRunningTime="2026-01-21 17:46:17.977386011 +0000 UTC m=+804.603676034" Jan 21 17:46:22 crc kubenswrapper[4799]: I0121 17:46:22.173865 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 17:46:22 crc kubenswrapper[4799]: I0121 17:46:22.173960 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 
17:46:23 crc kubenswrapper[4799]: I0121 17:46:23.214031 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ldh6q" podUID="32ea8c7f-d4bf-4003-a774-aa2380091efd" containerName="registry-server" probeResult="failure" output=< Jan 21 17:46:23 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Jan 21 17:46:23 crc kubenswrapper[4799]: > Jan 21 17:46:25 crc kubenswrapper[4799]: I0121 17:46:25.970857 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:46:25 crc kubenswrapper[4799]: I0121 17:46:25.972182 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.581743 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj"] Jan 21 17:46:26 crc kubenswrapper[4799]: E0121 17:46:26.582706 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfb67070-f383-42b8-bb55-1406f6994a95" containerName="util" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.582741 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfb67070-f383-42b8-bb55-1406f6994a95" containerName="util" Jan 21 17:46:26 crc kubenswrapper[4799]: E0121 17:46:26.582773 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfb67070-f383-42b8-bb55-1406f6994a95" containerName="extract" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.582781 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfb67070-f383-42b8-bb55-1406f6994a95" containerName="extract" Jan 21 17:46:26 crc kubenswrapper[4799]: E0121 17:46:26.582791 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfb67070-f383-42b8-bb55-1406f6994a95" containerName="pull" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.582799 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfb67070-f383-42b8-bb55-1406f6994a95" containerName="pull" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.582984 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfb67070-f383-42b8-bb55-1406f6994a95" containerName="extract" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.583969 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.587640 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.587814 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.588104 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-d9rq6" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.594573 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.594877 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.602973 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj"] Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.638450 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nb4j\" (UniqueName: \"kubernetes.io/projected/7007eeb3-f638-4564-bef1-01c1799f9659-kube-api-access-6nb4j\") pod \"metallb-operator-controller-manager-58dfbb9557-5cwxj\" (UID: \"7007eeb3-f638-4564-bef1-01c1799f9659\") " pod="metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.638570 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7007eeb3-f638-4564-bef1-01c1799f9659-webhook-cert\") pod \"metallb-operator-controller-manager-58dfbb9557-5cwxj\" (UID: \"7007eeb3-f638-4564-bef1-01c1799f9659\") " pod="metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.638633 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7007eeb3-f638-4564-bef1-01c1799f9659-apiservice-cert\") pod \"metallb-operator-controller-manager-58dfbb9557-5cwxj\" (UID: \"7007eeb3-f638-4564-bef1-01c1799f9659\") " pod="metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.740868 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nb4j\" (UniqueName: \"kubernetes.io/projected/7007eeb3-f638-4564-bef1-01c1799f9659-kube-api-access-6nb4j\") pod \"metallb-operator-controller-manager-58dfbb9557-5cwxj\" (UID: \"7007eeb3-f638-4564-bef1-01c1799f9659\") " pod="metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.741006 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7007eeb3-f638-4564-bef1-01c1799f9659-webhook-cert\") pod \"metallb-operator-controller-manager-58dfbb9557-5cwxj\" (UID: \"7007eeb3-f638-4564-bef1-01c1799f9659\") " pod="metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.741040 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7007eeb3-f638-4564-bef1-01c1799f9659-apiservice-cert\") pod \"metallb-operator-controller-manager-58dfbb9557-5cwxj\" (UID: \"7007eeb3-f638-4564-bef1-01c1799f9659\") " pod="metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.749938 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7007eeb3-f638-4564-bef1-01c1799f9659-apiservice-cert\") pod \"metallb-operator-controller-manager-58dfbb9557-5cwxj\" (UID: \"7007eeb3-f638-4564-bef1-01c1799f9659\") " pod="metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.766576 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nb4j\" (UniqueName: \"kubernetes.io/projected/7007eeb3-f638-4564-bef1-01c1799f9659-kube-api-access-6nb4j\") pod \"metallb-operator-controller-manager-58dfbb9557-5cwxj\" (UID: \"7007eeb3-f638-4564-bef1-01c1799f9659\") " pod="metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.767813 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7007eeb3-f638-4564-bef1-01c1799f9659-webhook-cert\") pod \"metallb-operator-controller-manager-58dfbb9557-5cwxj\" (UID: \"7007eeb3-f638-4564-bef1-01c1799f9659\") " pod="metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.906756 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.978621 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-766568f764-6v2cv"] Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.979768 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-766568f764-6v2cv" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.985250 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-44dbx" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.986249 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 21 17:46:26 crc kubenswrapper[4799]: I0121 17:46:26.986478 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Jan 21 17:46:27 crc kubenswrapper[4799]: I0121 17:46:27.010104 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-766568f764-6v2cv"] Jan 21 17:46:27 crc kubenswrapper[4799]: I0121 17:46:27.047588 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c911a8ad-608f-480c-83b2-672c420e3091-apiservice-cert\") pod \"metallb-operator-webhook-server-766568f764-6v2cv\" (UID: \"c911a8ad-608f-480c-83b2-672c420e3091\") " pod="metallb-system/metallb-operator-webhook-server-766568f764-6v2cv" Jan 21 17:46:27 crc kubenswrapper[4799]: I0121 17:46:27.048077 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c911a8ad-608f-480c-83b2-672c420e3091-webhook-cert\") pod \"metallb-operator-webhook-server-766568f764-6v2cv\" (UID: \"c911a8ad-608f-480c-83b2-672c420e3091\") " pod="metallb-system/metallb-operator-webhook-server-766568f764-6v2cv" Jan 21 17:46:27 crc kubenswrapper[4799]: I0121 17:46:27.048240 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6cg6\" (UniqueName: \"kubernetes.io/projected/c911a8ad-608f-480c-83b2-672c420e3091-kube-api-access-j6cg6\") pod \"metallb-operator-webhook-server-766568f764-6v2cv\" (UID: \"c911a8ad-608f-480c-83b2-672c420e3091\") " pod="metallb-system/metallb-operator-webhook-server-766568f764-6v2cv" Jan 21 17:46:27 crc kubenswrapper[4799]: I0121 17:46:27.152365 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c911a8ad-608f-480c-83b2-672c420e3091-apiservice-cert\") pod \"metallb-operator-webhook-server-766568f764-6v2cv\" (UID: \"c911a8ad-608f-480c-83b2-672c420e3091\") " pod="metallb-system/metallb-operator-webhook-server-766568f764-6v2cv" Jan 21 17:46:27 crc kubenswrapper[4799]: I0121 17:46:27.152483 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c911a8ad-608f-480c-83b2-672c420e3091-webhook-cert\") pod \"metallb-operator-webhook-server-766568f764-6v2cv\" (UID: \"c911a8ad-608f-480c-83b2-672c420e3091\") " pod="metallb-system/metallb-operator-webhook-server-766568f764-6v2cv" Jan 21 17:46:27 crc kubenswrapper[4799]: I0121 17:46:27.152573 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6cg6\" (UniqueName: \"kubernetes.io/projected/c911a8ad-608f-480c-83b2-672c420e3091-kube-api-access-j6cg6\") pod \"metallb-operator-webhook-server-766568f764-6v2cv\" (UID: \"c911a8ad-608f-480c-83b2-672c420e3091\") " pod="metallb-system/metallb-operator-webhook-server-766568f764-6v2cv" Jan 21 17:46:27 crc kubenswrapper[4799]: I0121 
17:46:27.163395 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c911a8ad-608f-480c-83b2-672c420e3091-webhook-cert\") pod \"metallb-operator-webhook-server-766568f764-6v2cv\" (UID: \"c911a8ad-608f-480c-83b2-672c420e3091\") " pod="metallb-system/metallb-operator-webhook-server-766568f764-6v2cv" Jan 21 17:46:27 crc kubenswrapper[4799]: I0121 17:46:27.165087 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c911a8ad-608f-480c-83b2-672c420e3091-apiservice-cert\") pod \"metallb-operator-webhook-server-766568f764-6v2cv\" (UID: \"c911a8ad-608f-480c-83b2-672c420e3091\") " pod="metallb-system/metallb-operator-webhook-server-766568f764-6v2cv" Jan 21 17:46:27 crc kubenswrapper[4799]: I0121 17:46:27.199865 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6cg6\" (UniqueName: \"kubernetes.io/projected/c911a8ad-608f-480c-83b2-672c420e3091-kube-api-access-j6cg6\") pod \"metallb-operator-webhook-server-766568f764-6v2cv\" (UID: \"c911a8ad-608f-480c-83b2-672c420e3091\") " pod="metallb-system/metallb-operator-webhook-server-766568f764-6v2cv" Jan 21 17:46:27 crc kubenswrapper[4799]: I0121 17:46:27.327521 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-766568f764-6v2cv" Jan 21 17:46:27 crc kubenswrapper[4799]: I0121 17:46:27.557271 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj"] Jan 21 17:46:27 crc kubenswrapper[4799]: W0121 17:46:27.588365 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7007eeb3_f638_4564_bef1_01c1799f9659.slice/crio-2fc1dd4cbbed815b7dff02b723ad65108bbda2b65ef404290d3d25332d474c8c WatchSource:0}: Error finding container 2fc1dd4cbbed815b7dff02b723ad65108bbda2b65ef404290d3d25332d474c8c: Status 404 returned error can't find the container with id 2fc1dd4cbbed815b7dff02b723ad65108bbda2b65ef404290d3d25332d474c8c Jan 21 17:46:27 crc kubenswrapper[4799]: I0121 17:46:27.809482 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-766568f764-6v2cv"] Jan 21 17:46:28 crc kubenswrapper[4799]: I0121 17:46:28.092723 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-766568f764-6v2cv" event={"ID":"c911a8ad-608f-480c-83b2-672c420e3091","Type":"ContainerStarted","Data":"7b255aca96dfc08ce0b9e4dc1ec7f02880618bc4805c261326c1e52c88d2c879"} Jan 21 17:46:28 crc kubenswrapper[4799]: I0121 17:46:28.096468 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj" event={"ID":"7007eeb3-f638-4564-bef1-01c1799f9659","Type":"ContainerStarted","Data":"2fc1dd4cbbed815b7dff02b723ad65108bbda2b65ef404290d3d25332d474c8c"} Jan 21 17:46:32 crc kubenswrapper[4799]: I0121 17:46:32.222358 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 17:46:32 crc kubenswrapper[4799]: I0121 17:46:32.290601 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 17:46:32 crc kubenswrapper[4799]: I0121 17:46:32.477321 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-operators-ldh6q"] Jan 21 17:46:34 crc kubenswrapper[4799]: I0121 17:46:34.156039 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ldh6q" podUID="32ea8c7f-d4bf-4003-a774-aa2380091efd" containerName="registry-server" containerID="cri-o://85255dfbbc54db7cada1198cf8442fec018b648371040667fc137c65f2bbc355" gracePeriod=2 Jan 21 17:46:35 crc kubenswrapper[4799]: I0121 17:46:35.167402 4799 generic.go:334] "Generic (PLEG): container finished" podID="32ea8c7f-d4bf-4003-a774-aa2380091efd" containerID="85255dfbbc54db7cada1198cf8442fec018b648371040667fc137c65f2bbc355" exitCode=0 Jan 21 17:46:35 crc kubenswrapper[4799]: I0121 17:46:35.167468 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ldh6q" event={"ID":"32ea8c7f-d4bf-4003-a774-aa2380091efd","Type":"ContainerDied","Data":"85255dfbbc54db7cada1198cf8442fec018b648371040667fc137c65f2bbc355"} Jan 21 17:46:35 crc kubenswrapper[4799]: I0121 17:46:35.560177 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 17:46:35 crc kubenswrapper[4799]: I0121 17:46:35.729547 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32ea8c7f-d4bf-4003-a774-aa2380091efd-catalog-content\") pod \"32ea8c7f-d4bf-4003-a774-aa2380091efd\" (UID: \"32ea8c7f-d4bf-4003-a774-aa2380091efd\") " Jan 21 17:46:35 crc kubenswrapper[4799]: I0121 17:46:35.729628 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32ea8c7f-d4bf-4003-a774-aa2380091efd-utilities\") pod \"32ea8c7f-d4bf-4003-a774-aa2380091efd\" (UID: \"32ea8c7f-d4bf-4003-a774-aa2380091efd\") " Jan 21 17:46:35 crc kubenswrapper[4799]: I0121 17:46:35.729664 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb4fm\" (UniqueName: \"kubernetes.io/projected/32ea8c7f-d4bf-4003-a774-aa2380091efd-kube-api-access-sb4fm\") pod \"32ea8c7f-d4bf-4003-a774-aa2380091efd\" (UID: \"32ea8c7f-d4bf-4003-a774-aa2380091efd\") " Jan 21 17:46:35 crc kubenswrapper[4799]: I0121 17:46:35.731113 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32ea8c7f-d4bf-4003-a774-aa2380091efd-utilities" (OuterVolumeSpecName: "utilities") pod "32ea8c7f-d4bf-4003-a774-aa2380091efd" (UID: "32ea8c7f-d4bf-4003-a774-aa2380091efd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:46:35 crc kubenswrapper[4799]: I0121 17:46:35.741631 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32ea8c7f-d4bf-4003-a774-aa2380091efd-kube-api-access-sb4fm" (OuterVolumeSpecName: "kube-api-access-sb4fm") pod "32ea8c7f-d4bf-4003-a774-aa2380091efd" (UID: "32ea8c7f-d4bf-4003-a774-aa2380091efd"). InnerVolumeSpecName "kube-api-access-sb4fm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:46:35 crc kubenswrapper[4799]: I0121 17:46:35.831617 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32ea8c7f-d4bf-4003-a774-aa2380091efd-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:46:35 crc kubenswrapper[4799]: I0121 17:46:35.831661 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb4fm\" (UniqueName: \"kubernetes.io/projected/32ea8c7f-d4bf-4003-a774-aa2380091efd-kube-api-access-sb4fm\") on node \"crc\" DevicePath \"\"" Jan 21 17:46:35 crc kubenswrapper[4799]: I0121 17:46:35.847723 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32ea8c7f-d4bf-4003-a774-aa2380091efd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "32ea8c7f-d4bf-4003-a774-aa2380091efd" (UID: "32ea8c7f-d4bf-4003-a774-aa2380091efd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:46:35 crc kubenswrapper[4799]: I0121 17:46:35.932693 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32ea8c7f-d4bf-4003-a774-aa2380091efd-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:46:36 crc kubenswrapper[4799]: I0121 17:46:36.192067 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-766568f764-6v2cv" event={"ID":"c911a8ad-608f-480c-83b2-672c420e3091","Type":"ContainerStarted","Data":"8f52743d2d9edf67459e78fafd21e4feec1a5c39cdcb3b10f3ed75b5458028c6"} Jan 21 17:46:36 crc kubenswrapper[4799]: I0121 17:46:36.193832 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj" event={"ID":"7007eeb3-f638-4564-bef1-01c1799f9659","Type":"ContainerStarted","Data":"eddf2eb3daabe0e21bc1b1441dde5a185a097f22b0509bf9ecf73521c332d44b"} Jan 21 17:46:36 crc kubenswrapper[4799]: I0121 17:46:36.193983 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj" Jan 21 17:46:36 crc kubenswrapper[4799]: I0121 17:46:36.196155 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ldh6q" event={"ID":"32ea8c7f-d4bf-4003-a774-aa2380091efd","Type":"ContainerDied","Data":"bbbc7cc0986ba4362dcf6bf792b24ff79cfa72e58b9c7c98f499072f3b5f5824"} Jan 21 17:46:36 crc kubenswrapper[4799]: I0121 17:46:36.196226 4799 scope.go:117] "RemoveContainer" containerID="85255dfbbc54db7cada1198cf8442fec018b648371040667fc137c65f2bbc355" Jan 21 17:46:36 crc kubenswrapper[4799]: I0121 17:46:36.196380 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ldh6q" Jan 21 17:46:36 crc kubenswrapper[4799]: I0121 17:46:36.227222 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-766568f764-6v2cv" podStartSLOduration=2.738718415 podStartE2EDuration="10.22716659s" podCreationTimestamp="2026-01-21 17:46:26 +0000 UTC" firstStartedPulling="2026-01-21 17:46:27.829597823 +0000 UTC m=+814.455887836" lastFinishedPulling="2026-01-21 17:46:35.318045988 +0000 UTC m=+821.944336011" observedRunningTime="2026-01-21 17:46:36.217881626 +0000 UTC m=+822.844171659" watchObservedRunningTime="2026-01-21 17:46:36.22716659 +0000 UTC m=+822.853456623" Jan 21 17:46:36 crc kubenswrapper[4799]: I0121 17:46:36.249944 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-58dfbb9557-5cwxj" podStartSLOduration=2.598905454 podStartE2EDuration="10.249909246s" podCreationTimestamp="2026-01-21 17:46:26 +0000 UTC" firstStartedPulling="2026-01-21 17:46:27.628380498 +0000 UTC m=+814.254670531" lastFinishedPulling="2026-01-21 17:46:35.2793843 +0000 UTC m=+821.905674323" observedRunningTime="2026-01-21 17:46:36.244954875 +0000 UTC m=+822.871244908" watchObservedRunningTime="2026-01-21 17:46:36.249909246 +0000 UTC m=+822.876199269" Jan 21 17:46:36 crc kubenswrapper[4799]: I0121 17:46:36.258911 4799 scope.go:117] "RemoveContainer" containerID="d25b113a97b24c8823fc22133c514203f050577ae9940932a0fc05f6a0faae4a" Jan 21 17:46:36 crc kubenswrapper[4799]: I0121 17:46:36.477501 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ldh6q"] Jan 21 17:46:36 crc kubenswrapper[4799]: I0121 17:46:36.487241 4799 scope.go:117] "RemoveContainer" containerID="ae75d1d811cf1ae335f3f2e939f6a7f04f99f33ad08c273c7dcbc823fe2865fd" Jan 21 17:46:36 crc kubenswrapper[4799]: I0121 17:46:36.494537 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ldh6q"] Jan 21 17:46:37 crc kubenswrapper[4799]: I0121 17:46:37.205006 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-766568f764-6v2cv" Jan 21 17:46:38 crc kubenswrapper[4799]: I0121 17:46:38.216846 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32ea8c7f-d4bf-4003-a774-aa2380091efd" path="/var/lib/kubelet/pods/32ea8c7f-d4bf-4003-a774-aa2380091efd/volumes" Jan 21 17:46:47 crc kubenswrapper[4799]: I0121 17:46:47.339798 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-766568f764-6v2cv" Jan 21 17:46:55 crc kubenswrapper[4799]: I0121 17:46:55.971030 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:46:55 crc kubenswrapper[4799]: I0121 17:46:55.971859 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:47:06 crc kubenswrapper[4799]: I0121 17:47:06.910682 4799 
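[Note: the "Observed pod startup duration" entries above can be cross-checked by hand. podStartSLOduration appears to be podStartE2EDuration minus the image-pull window (lastFinishedPulling − firstStartedPulling); the relationship holds for every tracker entry in this section. A quick Go check against the webhook-server numbers:]

```go
// Cross-check of the pod_startup_latency_tracker entries: SLO duration
// equals the end-to-end duration minus the image-pull window.
package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	first, _ := time.Parse(layout, "2026-01-21 17:46:27.829597823 +0000 UTC")
	last, _ := time.Parse(layout, "2026-01-21 17:46:35.318045988 +0000 UTC")

	e2e := 10227166590 * time.Nanosecond // podStartE2EDuration="10.22716659s"
	pull := last.Sub(first)              // 7.488448165s spent pulling the image

	fmt.Println(e2e - pull) // 2.738718425s, matching podStartSLOduration=2.738718415
}
```

[The same arithmetic reproduces the controller-manager entry: 10.249909246s minus its 7.651003802s pull window gives the logged 2.598905454.]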
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.717553 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-4n47t"]
Jan 21 17:47:07 crc kubenswrapper[4799]: E0121 17:47:07.718496 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32ea8c7f-d4bf-4003-a774-aa2380091efd" containerName="extract-content"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.718547 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="32ea8c7f-d4bf-4003-a774-aa2380091efd" containerName="extract-content"
Jan 21 17:47:07 crc kubenswrapper[4799]: E0121 17:47:07.718582 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32ea8c7f-d4bf-4003-a774-aa2380091efd" containerName="extract-utilities"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.718591 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="32ea8c7f-d4bf-4003-a774-aa2380091efd" containerName="extract-utilities"
Jan 21 17:47:07 crc kubenswrapper[4799]: E0121 17:47:07.718603 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32ea8c7f-d4bf-4003-a774-aa2380091efd" containerName="registry-server"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.718615 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="32ea8c7f-d4bf-4003-a774-aa2380091efd" containerName="registry-server"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.718816 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="32ea8c7f-d4bf-4003-a774-aa2380091efd" containerName="registry-server"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.719623 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-4n47t"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.723234 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.723764 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-znpln"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.731592 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-74wj8"]
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.736438 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-74wj8"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.738097 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.739347 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.743598 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-4n47t"]
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.810578 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8tkk\" (UniqueName: \"kubernetes.io/projected/1568add4-52bd-4796-87e0-2d9fc9f92324-kube-api-access-h8tkk\") pod \"frr-k8s-webhook-server-7df86c4f6c-4n47t\" (UID: \"1568add4-52bd-4796-87e0-2d9fc9f92324\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-4n47t"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.810643 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/1f8c16b9-b58d-4bf1-a086-47e9c8339544-reloader\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.810715 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/1f8c16b9-b58d-4bf1-a086-47e9c8339544-metrics\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.810734 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/1f8c16b9-b58d-4bf1-a086-47e9c8339544-frr-startup\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.810771 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dtr4\" (UniqueName: \"kubernetes.io/projected/1f8c16b9-b58d-4bf1-a086-47e9c8339544-kube-api-access-2dtr4\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.810854 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1568add4-52bd-4796-87e0-2d9fc9f92324-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-4n47t\" (UID: \"1568add4-52bd-4796-87e0-2d9fc9f92324\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-4n47t"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.810929 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1f8c16b9-b58d-4bf1-a086-47e9c8339544-metrics-certs\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.810963 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/1f8c16b9-b58d-4bf1-a086-47e9c8339544-frr-conf\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.810990 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/1f8c16b9-b58d-4bf1-a086-47e9c8339544-frr-sockets\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.826202 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-cdw6h"]
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.827651 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-cdw6h"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.832229 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.832571 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-jglnd"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.832717 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.835066 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.835324 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-sbtsn"]
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.846259 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-sbtsn"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.849898 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.853773 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-sbtsn"]
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.913377 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-metallb-excludel2\") pod \"speaker-cdw6h\" (UID: \"6c55e902-cf8f-4a8d-ade3-4bd470144d8e\") " pod="metallb-system/speaker-cdw6h"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.913504 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/1f8c16b9-b58d-4bf1-a086-47e9c8339544-metrics\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.913543 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/1f8c16b9-b58d-4bf1-a086-47e9c8339544-frr-startup\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.913572 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrv5l\" (UniqueName: \"kubernetes.io/projected/0af2a3ea-da2e-4b99-9486-ce12263a62bf-kube-api-access-hrv5l\") pod \"controller-6968d8fdc4-sbtsn\" (UID: \"0af2a3ea-da2e-4b99-9486-ce12263a62bf\") " pod="metallb-system/controller-6968d8fdc4-sbtsn"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.913613 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dtr4\" (UniqueName: \"kubernetes.io/projected/1f8c16b9-b58d-4bf1-a086-47e9c8339544-kube-api-access-2dtr4\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.913634 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1568add4-52bd-4796-87e0-2d9fc9f92324-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-4n47t\" (UID: \"1568add4-52bd-4796-87e0-2d9fc9f92324\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-4n47t"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.913662 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-memberlist\") pod \"speaker-cdw6h\" (UID: \"6c55e902-cf8f-4a8d-ade3-4bd470144d8e\") " pod="metallb-system/speaker-cdw6h"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.913679 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1f8c16b9-b58d-4bf1-a086-47e9c8339544-metrics-certs\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8"
Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.913700 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0af2a3ea-da2e-4b99-9486-ce12263a62bf-cert\") pod \"controller-6968d8fdc4-sbtsn\" (UID: \"0af2a3ea-da2e-4b99-9486-ce12263a62bf\") " pod="metallb-system/controller-6968d8fdc4-sbtsn"
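[Note: the reconciler entries interleave two phases per volume: VerifyControllerAttachedVolume first, then MountVolume on a later pass, ending in "MountVolume.SetUp succeeded". A toy two-pass loop showing the same shape; illustrative only, not kubelet's actual reconciler.]

```go
// Toy two-pass volume reconciler, loosely mirroring the log's
// VerifyControllerAttachedVolume -> MountVolume -> SetUp sequence.
package main

import "fmt"

type volume struct {
	name              string
	attached, mounted bool
}

func reconcile(vols []*volume) {
	for _, v := range vols {
		switch {
		case !v.attached:
			fmt.Printf("VerifyControllerAttachedVolume started for volume %q\n", v.name)
			v.attached = true // assume verification succeeds
		case !v.mounted:
			fmt.Printf("MountVolume started for volume %q\n", v.name)
			v.mounted = true
			fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", v.name)
		}
	}
}

func main() {
	vols := []*volume{{name: "frr-startup"}, {name: "metrics"}}
	reconcile(vols) // pass 1: verify attachment
	reconcile(vols) // pass 2: mount
}
```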
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0af2a3ea-da2e-4b99-9486-ce12263a62bf-cert\") pod \"controller-6968d8fdc4-sbtsn\" (UID: \"0af2a3ea-da2e-4b99-9486-ce12263a62bf\") " pod="metallb-system/controller-6968d8fdc4-sbtsn" Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.913725 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/1f8c16b9-b58d-4bf1-a086-47e9c8339544-frr-conf\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8" Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.913771 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9sdq\" (UniqueName: \"kubernetes.io/projected/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-kube-api-access-m9sdq\") pod \"speaker-cdw6h\" (UID: \"6c55e902-cf8f-4a8d-ade3-4bd470144d8e\") " pod="metallb-system/speaker-cdw6h" Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.913789 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/1f8c16b9-b58d-4bf1-a086-47e9c8339544-frr-sockets\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8" Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.913808 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-metrics-certs\") pod \"speaker-cdw6h\" (UID: \"6c55e902-cf8f-4a8d-ade3-4bd470144d8e\") " pod="metallb-system/speaker-cdw6h" Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.913827 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8tkk\" (UniqueName: \"kubernetes.io/projected/1568add4-52bd-4796-87e0-2d9fc9f92324-kube-api-access-h8tkk\") pod \"frr-k8s-webhook-server-7df86c4f6c-4n47t\" (UID: \"1568add4-52bd-4796-87e0-2d9fc9f92324\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-4n47t" Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.913848 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/1f8c16b9-b58d-4bf1-a086-47e9c8339544-reloader\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8" Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.913876 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0af2a3ea-da2e-4b99-9486-ce12263a62bf-metrics-certs\") pod \"controller-6968d8fdc4-sbtsn\" (UID: \"0af2a3ea-da2e-4b99-9486-ce12263a62bf\") " pod="metallb-system/controller-6968d8fdc4-sbtsn" Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.914354 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/1f8c16b9-b58d-4bf1-a086-47e9c8339544-metrics\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8" Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.915144 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/1f8c16b9-b58d-4bf1-a086-47e9c8339544-frr-startup\") 
pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8" Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.915402 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/1f8c16b9-b58d-4bf1-a086-47e9c8339544-frr-sockets\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8" Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.915521 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/1f8c16b9-b58d-4bf1-a086-47e9c8339544-reloader\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8" Jan 21 17:47:07 crc kubenswrapper[4799]: E0121 17:47:07.915584 4799 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Jan 21 17:47:07 crc kubenswrapper[4799]: E0121 17:47:07.915713 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1f8c16b9-b58d-4bf1-a086-47e9c8339544-metrics-certs podName:1f8c16b9-b58d-4bf1-a086-47e9c8339544 nodeName:}" failed. No retries permitted until 2026-01-21 17:47:08.415678186 +0000 UTC m=+855.041968409 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1f8c16b9-b58d-4bf1-a086-47e9c8339544-metrics-certs") pod "frr-k8s-74wj8" (UID: "1f8c16b9-b58d-4bf1-a086-47e9c8339544") : secret "frr-k8s-certs-secret" not found Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.915850 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/1f8c16b9-b58d-4bf1-a086-47e9c8339544-frr-conf\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8" Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.934789 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8tkk\" (UniqueName: \"kubernetes.io/projected/1568add4-52bd-4796-87e0-2d9fc9f92324-kube-api-access-h8tkk\") pod \"frr-k8s-webhook-server-7df86c4f6c-4n47t\" (UID: \"1568add4-52bd-4796-87e0-2d9fc9f92324\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-4n47t" Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.940904 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dtr4\" (UniqueName: \"kubernetes.io/projected/1f8c16b9-b58d-4bf1-a086-47e9c8339544-kube-api-access-2dtr4\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8" Jan 21 17:47:07 crc kubenswrapper[4799]: I0121 17:47:07.942917 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1568add4-52bd-4796-87e0-2d9fc9f92324-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-4n47t\" (UID: \"1568add4-52bd-4796-87e0-2d9fc9f92324\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-4n47t" Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.014820 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0af2a3ea-da2e-4b99-9486-ce12263a62bf-metrics-certs\") pod \"controller-6968d8fdc4-sbtsn\" (UID: \"0af2a3ea-da2e-4b99-9486-ce12263a62bf\") " pod="metallb-system/controller-6968d8fdc4-sbtsn" Jan 21 17:47:08 crc 
kubenswrapper[4799]: I0121 17:47:08.014899 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-metallb-excludel2\") pod \"speaker-cdw6h\" (UID: \"6c55e902-cf8f-4a8d-ade3-4bd470144d8e\") " pod="metallb-system/speaker-cdw6h" Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.014955 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrv5l\" (UniqueName: \"kubernetes.io/projected/0af2a3ea-da2e-4b99-9486-ce12263a62bf-kube-api-access-hrv5l\") pod \"controller-6968d8fdc4-sbtsn\" (UID: \"0af2a3ea-da2e-4b99-9486-ce12263a62bf\") " pod="metallb-system/controller-6968d8fdc4-sbtsn" Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.014992 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-memberlist\") pod \"speaker-cdw6h\" (UID: \"6c55e902-cf8f-4a8d-ade3-4bd470144d8e\") " pod="metallb-system/speaker-cdw6h" Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.015026 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0af2a3ea-da2e-4b99-9486-ce12263a62bf-cert\") pod \"controller-6968d8fdc4-sbtsn\" (UID: \"0af2a3ea-da2e-4b99-9486-ce12263a62bf\") " pod="metallb-system/controller-6968d8fdc4-sbtsn" Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.015048 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9sdq\" (UniqueName: \"kubernetes.io/projected/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-kube-api-access-m9sdq\") pod \"speaker-cdw6h\" (UID: \"6c55e902-cf8f-4a8d-ade3-4bd470144d8e\") " pod="metallb-system/speaker-cdw6h" Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.015069 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-metrics-certs\") pod \"speaker-cdw6h\" (UID: \"6c55e902-cf8f-4a8d-ade3-4bd470144d8e\") " pod="metallb-system/speaker-cdw6h" Jan 21 17:47:08 crc kubenswrapper[4799]: E0121 17:47:08.015058 4799 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Jan 21 17:47:08 crc kubenswrapper[4799]: E0121 17:47:08.015232 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0af2a3ea-da2e-4b99-9486-ce12263a62bf-metrics-certs podName:0af2a3ea-da2e-4b99-9486-ce12263a62bf nodeName:}" failed. No retries permitted until 2026-01-21 17:47:08.51520124 +0000 UTC m=+855.141491263 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0af2a3ea-da2e-4b99-9486-ce12263a62bf-metrics-certs") pod "controller-6968d8fdc4-sbtsn" (UID: "0af2a3ea-da2e-4b99-9486-ce12263a62bf") : secret "controller-certs-secret" not found Jan 21 17:47:08 crc kubenswrapper[4799]: E0121 17:47:08.015571 4799 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 21 17:47:08 crc kubenswrapper[4799]: E0121 17:47:08.015602 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-memberlist podName:6c55e902-cf8f-4a8d-ade3-4bd470144d8e nodeName:}" failed. 
No retries permitted until 2026-01-21 17:47:08.515595431 +0000 UTC m=+855.141885454 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-memberlist") pod "speaker-cdw6h" (UID: "6c55e902-cf8f-4a8d-ade3-4bd470144d8e") : secret "metallb-memberlist" not found Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.016555 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-metallb-excludel2\") pod \"speaker-cdw6h\" (UID: \"6c55e902-cf8f-4a8d-ade3-4bd470144d8e\") " pod="metallb-system/speaker-cdw6h" Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.020580 4799 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.020762 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-metrics-certs\") pod \"speaker-cdw6h\" (UID: \"6c55e902-cf8f-4a8d-ade3-4bd470144d8e\") " pod="metallb-system/speaker-cdw6h" Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.036208 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0af2a3ea-da2e-4b99-9486-ce12263a62bf-cert\") pod \"controller-6968d8fdc4-sbtsn\" (UID: \"0af2a3ea-da2e-4b99-9486-ce12263a62bf\") " pod="metallb-system/controller-6968d8fdc4-sbtsn" Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.036740 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrv5l\" (UniqueName: \"kubernetes.io/projected/0af2a3ea-da2e-4b99-9486-ce12263a62bf-kube-api-access-hrv5l\") pod \"controller-6968d8fdc4-sbtsn\" (UID: \"0af2a3ea-da2e-4b99-9486-ce12263a62bf\") " pod="metallb-system/controller-6968d8fdc4-sbtsn" Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.036957 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9sdq\" (UniqueName: \"kubernetes.io/projected/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-kube-api-access-m9sdq\") pod \"speaker-cdw6h\" (UID: \"6c55e902-cf8f-4a8d-ade3-4bd470144d8e\") " pod="metallb-system/speaker-cdw6h" Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.055950 4799 util.go:30] "No sandbox for pod can be found. 
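[Note: the nestedpendingoperations failures above schedule the next attempt 500ms out (durationBeforeRetry 500ms); when the memberlist secret is still missing on the retry, the delay doubles to 1s in the entries just below. Exponential backoff in miniature; the cap below is an assumed value for illustration, not taken from the log.]

```go
// Doubling retry delay, as in the nestedpendingoperations entries.
package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 500 * time.Millisecond // base delay seen in the log
	const maxDelay = 2 * time.Minute // assumed cap, for illustration only
	for attempt := 1; attempt <= 5; attempt++ {
		fmt.Printf("attempt %d failed; no retries permitted for %v\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}
```

[The failures stop as soon as the operator creates the missing Secret: the same volumes mount cleanly on the retry passes below.]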
Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.422404 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1f8c16b9-b58d-4bf1-a086-47e9c8339544-metrics-certs\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8"
Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.430023 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1f8c16b9-b58d-4bf1-a086-47e9c8339544-metrics-certs\") pod \"frr-k8s-74wj8\" (UID: \"1f8c16b9-b58d-4bf1-a086-47e9c8339544\") " pod="metallb-system/frr-k8s-74wj8"
Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.524607 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0af2a3ea-da2e-4b99-9486-ce12263a62bf-metrics-certs\") pod \"controller-6968d8fdc4-sbtsn\" (UID: \"0af2a3ea-da2e-4b99-9486-ce12263a62bf\") " pod="metallb-system/controller-6968d8fdc4-sbtsn"
Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.524787 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-memberlist\") pod \"speaker-cdw6h\" (UID: \"6c55e902-cf8f-4a8d-ade3-4bd470144d8e\") " pod="metallb-system/speaker-cdw6h"
Jan 21 17:47:08 crc kubenswrapper[4799]: E0121 17:47:08.524998 4799 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Jan 21 17:47:08 crc kubenswrapper[4799]: E0121 17:47:08.525098 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-memberlist podName:6c55e902-cf8f-4a8d-ade3-4bd470144d8e nodeName:}" failed. No retries permitted until 2026-01-21 17:47:09.525062449 +0000 UTC m=+856.151352472 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-memberlist") pod "speaker-cdw6h" (UID: "6c55e902-cf8f-4a8d-ade3-4bd470144d8e") : secret "metallb-memberlist" not found
Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.531290 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0af2a3ea-da2e-4b99-9486-ce12263a62bf-metrics-certs\") pod \"controller-6968d8fdc4-sbtsn\" (UID: \"0af2a3ea-da2e-4b99-9486-ce12263a62bf\") " pod="metallb-system/controller-6968d8fdc4-sbtsn"
Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.532043 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-4n47t"]
Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.663316 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-74wj8"
Jan 21 17:47:08 crc kubenswrapper[4799]: I0121 17:47:08.769772 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-sbtsn"
Jan 21 17:47:09 crc kubenswrapper[4799]: I0121 17:47:09.229362 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-sbtsn"]
Jan 21 17:47:09 crc kubenswrapper[4799]: W0121 17:47:09.240353 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0af2a3ea_da2e_4b99_9486_ce12263a62bf.slice/crio-927f0d1a8c03fcba084c38fcd7ae693768ae39c609178885d55ea3205051b8d8 WatchSource:0}: Error finding container 927f0d1a8c03fcba084c38fcd7ae693768ae39c609178885d55ea3205051b8d8: Status 404 returned error can't find the container with id 927f0d1a8c03fcba084c38fcd7ae693768ae39c609178885d55ea3205051b8d8
Jan 21 17:47:09 crc kubenswrapper[4799]: I0121 17:47:09.472141 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-4n47t" event={"ID":"1568add4-52bd-4796-87e0-2d9fc9f92324","Type":"ContainerStarted","Data":"1d4949c6ce3647247aa7508e27b7988899d87cb3c8d07482d689edb6a2d53c54"}
Jan 21 17:47:09 crc kubenswrapper[4799]: I0121 17:47:09.476104 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-74wj8" event={"ID":"1f8c16b9-b58d-4bf1-a086-47e9c8339544","Type":"ContainerStarted","Data":"61ff6ed5183f4ce4d82ab02c94e566a4e925bbc0a972f63004678ff2d9fabb74"}
Jan 21 17:47:09 crc kubenswrapper[4799]: I0121 17:47:09.479720 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-sbtsn" event={"ID":"0af2a3ea-da2e-4b99-9486-ce12263a62bf","Type":"ContainerStarted","Data":"60691b3038ecdd0692be2510dbe938c13d04af30663de23374626b90a34fae09"}
Jan 21 17:47:09 crc kubenswrapper[4799]: I0121 17:47:09.479807 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-sbtsn" event={"ID":"0af2a3ea-da2e-4b99-9486-ce12263a62bf","Type":"ContainerStarted","Data":"927f0d1a8c03fcba084c38fcd7ae693768ae39c609178885d55ea3205051b8d8"}
Jan 21 17:47:09 crc kubenswrapper[4799]: I0121 17:47:09.547623 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-memberlist\") pod \"speaker-cdw6h\" (UID: \"6c55e902-cf8f-4a8d-ade3-4bd470144d8e\") " pod="metallb-system/speaker-cdw6h"
Jan 21 17:47:09 crc kubenswrapper[4799]: I0121 17:47:09.559374 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6c55e902-cf8f-4a8d-ade3-4bd470144d8e-memberlist\") pod \"speaker-cdw6h\" (UID: \"6c55e902-cf8f-4a8d-ade3-4bd470144d8e\") " pod="metallb-system/speaker-cdw6h"
Jan 21 17:47:09 crc kubenswrapper[4799]: I0121 17:47:09.649797 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-cdw6h"
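[Note: the "m=+855.041968409"-style suffix on every timestamp above is Go's monotonic clock reading, which time.Time.String() appends when the value carries one; in these entries it counts seconds relative to the kubelet process's monotonic baseline, which is why it grows steadily across the log while the wall-clock part can in principle jump. Demonstration:]

```go
// Go timestamps carry a monotonic reading; String() prints it as "m=+...".
package main

import (
	"fmt"
	"time"
)

func main() {
	now := time.Now()
	fmt.Println(now)          // "... +0000 UTC m=+0.000012345" (value varies)
	fmt.Println(now.Round(0)) // Round(0) strips the monotonic reading
}
```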
Jan 21 17:47:09 crc kubenswrapper[4799]: W0121 17:47:09.680724 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c55e902_cf8f_4a8d_ade3_4bd470144d8e.slice/crio-beb4dc3eb7e36496ab299347d869720ede6bdae186a4734331cb5e40ccaa2b4c WatchSource:0}: Error finding container beb4dc3eb7e36496ab299347d869720ede6bdae186a4734331cb5e40ccaa2b4c: Status 404 returned error can't find the container with id beb4dc3eb7e36496ab299347d869720ede6bdae186a4734331cb5e40ccaa2b4c
Jan 21 17:47:10 crc kubenswrapper[4799]: I0121 17:47:10.499148 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-sbtsn" event={"ID":"0af2a3ea-da2e-4b99-9486-ce12263a62bf","Type":"ContainerStarted","Data":"4573b4aad8711abfc71c37f332e94cc7577b43cf21ced8f9a2ff1f25d59ed68c"}
Jan 21 17:47:10 crc kubenswrapper[4799]: I0121 17:47:10.499623 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-sbtsn"
Jan 21 17:47:10 crc kubenswrapper[4799]: I0121 17:47:10.508143 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-cdw6h" event={"ID":"6c55e902-cf8f-4a8d-ade3-4bd470144d8e","Type":"ContainerStarted","Data":"dc4650548b2b5088d1007bf8d36e58cfc1c45391bbe7673978c25738ca7a25aa"}
Jan 21 17:47:10 crc kubenswrapper[4799]: I0121 17:47:10.508191 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-cdw6h" event={"ID":"6c55e902-cf8f-4a8d-ade3-4bd470144d8e","Type":"ContainerStarted","Data":"b1ea1d44ad397b1c8a9042fb72e10d618c52d2f23c28046dfc265f571d5cf672"}
Jan 21 17:47:10 crc kubenswrapper[4799]: I0121 17:47:10.508200 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-cdw6h" event={"ID":"6c55e902-cf8f-4a8d-ade3-4bd470144d8e","Type":"ContainerStarted","Data":"beb4dc3eb7e36496ab299347d869720ede6bdae186a4734331cb5e40ccaa2b4c"}
Jan 21 17:47:10 crc kubenswrapper[4799]: I0121 17:47:10.509173 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-cdw6h"
Jan 21 17:47:10 crc kubenswrapper[4799]: I0121 17:47:10.536547 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-sbtsn" podStartSLOduration=3.536509783 podStartE2EDuration="3.536509783s" podCreationTimestamp="2026-01-21 17:47:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:47:10.531357358 +0000 UTC m=+857.157647391" watchObservedRunningTime="2026-01-21 17:47:10.536509783 +0000 UTC m=+857.162799796"
Jan 21 17:47:10 crc kubenswrapper[4799]: I0121 17:47:10.563139 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-cdw6h" podStartSLOduration=3.563101295 podStartE2EDuration="3.563101295s" podCreationTimestamp="2026-01-21 17:47:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:47:10.559103972 +0000 UTC m=+857.185393995" watchObservedRunningTime="2026-01-21 17:47:10.563101295 +0000 UTC m=+857.189391318"
Jan 21 17:47:17 crc kubenswrapper[4799]: E0121 17:47:17.304321 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f8c16b9_b58d_4bf1_a086_47e9c8339544.slice/crio-conmon-603bf8c38bbc19cfe8d9948288f59c136ad6e1a52a93fe3cace855434bb015f6.scope\": RecentStats: unable to find data in memory cache]"
Jan 21 17:47:17 crc kubenswrapper[4799]: I0121 17:47:17.586689 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-4n47t" event={"ID":"1568add4-52bd-4796-87e0-2d9fc9f92324","Type":"ContainerStarted","Data":"176927a2876c0417a0d3a5932ddc16ee2f13a38cf5936ee7862ea1f304420bb4"}
Jan 21 17:47:17 crc kubenswrapper[4799]: I0121 17:47:17.586866 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-4n47t"
Jan 21 17:47:17 crc kubenswrapper[4799]: I0121 17:47:17.589084 4799 generic.go:334] "Generic (PLEG): container finished" podID="1f8c16b9-b58d-4bf1-a086-47e9c8339544" containerID="603bf8c38bbc19cfe8d9948288f59c136ad6e1a52a93fe3cace855434bb015f6" exitCode=0
Jan 21 17:47:17 crc kubenswrapper[4799]: I0121 17:47:17.589163 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-74wj8" event={"ID":"1f8c16b9-b58d-4bf1-a086-47e9c8339544","Type":"ContainerDied","Data":"603bf8c38bbc19cfe8d9948288f59c136ad6e1a52a93fe3cace855434bb015f6"}
Jan 21 17:47:17 crc kubenswrapper[4799]: I0121 17:47:17.613647 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-4n47t" podStartSLOduration=2.16653366 podStartE2EDuration="10.613618787s" podCreationTimestamp="2026-01-21 17:47:07 +0000 UTC" firstStartedPulling="2026-01-21 17:47:08.542107792 +0000 UTC m=+855.168397815" lastFinishedPulling="2026-01-21 17:47:16.989192929 +0000 UTC m=+863.615482942" observedRunningTime="2026-01-21 17:47:17.609613884 +0000 UTC m=+864.235903917" watchObservedRunningTime="2026-01-21 17:47:17.613618787 +0000 UTC m=+864.239908810"
Jan 21 17:47:18 crc kubenswrapper[4799]: I0121 17:47:18.601087 4799 generic.go:334] "Generic (PLEG): container finished" podID="1f8c16b9-b58d-4bf1-a086-47e9c8339544" containerID="242a44f30059678b5f8e04f89548c23ae8c64585d9e9c8e02121e934ad0ed1b4" exitCode=0
Jan 21 17:47:18 crc kubenswrapper[4799]: I0121 17:47:18.601240 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-74wj8" event={"ID":"1f8c16b9-b58d-4bf1-a086-47e9c8339544","Type":"ContainerDied","Data":"242a44f30059678b5f8e04f89548c23ae8c64585d9e9c8e02121e934ad0ed1b4"}
Jan 21 17:47:19 crc kubenswrapper[4799]: I0121 17:47:19.611982 4799 generic.go:334] "Generic (PLEG): container finished" podID="1f8c16b9-b58d-4bf1-a086-47e9c8339544" containerID="17ce36e65b9c50e82f626274321fb38964cd6a24db33e78c1dde5a9c482f8262" exitCode=0
Jan 21 17:47:19 crc kubenswrapper[4799]: I0121 17:47:19.612063 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-74wj8" event={"ID":"1f8c16b9-b58d-4bf1-a086-47e9c8339544","Type":"ContainerDied","Data":"17ce36e65b9c50e82f626274321fb38964cd6a24db33e78c1dde5a9c482f8262"}
Jan 21 17:47:20 crc kubenswrapper[4799]: I0121 17:47:20.622057 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-74wj8" event={"ID":"1f8c16b9-b58d-4bf1-a086-47e9c8339544","Type":"ContainerStarted","Data":"24267142c8e916f127877b911e1e566a1241d6a8fcb29dfdc35ec8e61c1e0c44"}
Jan 21 17:47:20 crc kubenswrapper[4799]: I0121 17:47:20.623069 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-74wj8" event={"ID":"1f8c16b9-b58d-4bf1-a086-47e9c8339544","Type":"ContainerStarted","Data":"cdbf8deaf46b176299090c9d13e232959cfa80c0c529e6211983e7530021e0b8"}
event={"ID":"1f8c16b9-b58d-4bf1-a086-47e9c8339544","Type":"ContainerStarted","Data":"cdbf8deaf46b176299090c9d13e232959cfa80c0c529e6211983e7530021e0b8"} Jan 21 17:47:21 crc kubenswrapper[4799]: I0121 17:47:21.637754 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-74wj8" event={"ID":"1f8c16b9-b58d-4bf1-a086-47e9c8339544","Type":"ContainerStarted","Data":"8bbc93cac21d840de6fca71c72c4e44f43b68a17ee59b4c405fa19ee0fee4717"} Jan 21 17:47:21 crc kubenswrapper[4799]: I0121 17:47:21.637827 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-74wj8" event={"ID":"1f8c16b9-b58d-4bf1-a086-47e9c8339544","Type":"ContainerStarted","Data":"63eecb678b3c388aba8e06f683cae7c4e48c6302c50e39ece769d10c98540252"} Jan 21 17:47:22 crc kubenswrapper[4799]: I0121 17:47:22.663215 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-74wj8" event={"ID":"1f8c16b9-b58d-4bf1-a086-47e9c8339544","Type":"ContainerStarted","Data":"f79aa5ac8b386fd032ee27dc4a89cc29b8c18bae25143ec7df1a57956a96ba0d"} Jan 21 17:47:22 crc kubenswrapper[4799]: I0121 17:47:22.663599 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-74wj8" event={"ID":"1f8c16b9-b58d-4bf1-a086-47e9c8339544","Type":"ContainerStarted","Data":"6280ae84b9f1a329dc3d15d7d48faf9d0ee98872111078f399b40659b70b29a4"} Jan 21 17:47:22 crc kubenswrapper[4799]: I0121 17:47:22.663768 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-74wj8" Jan 21 17:47:22 crc kubenswrapper[4799]: I0121 17:47:22.696015 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-74wj8" podStartSLOduration=7.5312763799999995 podStartE2EDuration="15.695993111s" podCreationTimestamp="2026-01-21 17:47:07 +0000 UTC" firstStartedPulling="2026-01-21 17:47:08.812686674 +0000 UTC m=+855.438976697" lastFinishedPulling="2026-01-21 17:47:16.977403395 +0000 UTC m=+863.603693428" observedRunningTime="2026-01-21 17:47:22.694630672 +0000 UTC m=+869.320920715" watchObservedRunningTime="2026-01-21 17:47:22.695993111 +0000 UTC m=+869.322283124" Jan 21 17:47:23 crc kubenswrapper[4799]: I0121 17:47:23.663975 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-74wj8" Jan 21 17:47:23 crc kubenswrapper[4799]: I0121 17:47:23.708568 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-74wj8" Jan 21 17:47:25 crc kubenswrapper[4799]: I0121 17:47:25.971638 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:47:25 crc kubenswrapper[4799]: I0121 17:47:25.972088 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:47:25 crc kubenswrapper[4799]: I0121 17:47:25.973104 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 17:47:25 crc kubenswrapper[4799]: I0121 17:47:25.974306 4799 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"eed6e35e0dd567b7136adb6f803c960c31a5e8beac68fc922967bfc8623a01c5"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 17:47:25 crc kubenswrapper[4799]: I0121 17:47:25.974438 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://eed6e35e0dd567b7136adb6f803c960c31a5e8beac68fc922967bfc8623a01c5" gracePeriod=600 Jan 21 17:47:26 crc kubenswrapper[4799]: I0121 17:47:26.707215 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="eed6e35e0dd567b7136adb6f803c960c31a5e8beac68fc922967bfc8623a01c5" exitCode=0 Jan 21 17:47:26 crc kubenswrapper[4799]: I0121 17:47:26.707841 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"eed6e35e0dd567b7136adb6f803c960c31a5e8beac68fc922967bfc8623a01c5"} Jan 21 17:47:26 crc kubenswrapper[4799]: I0121 17:47:26.707929 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"ae5330e16575441a8b84498a2fefd6345766a3ffb339a011bad17c508c054c31"} Jan 21 17:47:26 crc kubenswrapper[4799]: I0121 17:47:26.707962 4799 scope.go:117] "RemoveContainer" containerID="cf754122da61833aa1525f3575372a725cd96a25aa66c0876c3c4f82026fd7ab" Jan 21 17:47:28 crc kubenswrapper[4799]: I0121 17:47:28.063002 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-4n47t" Jan 21 17:47:28 crc kubenswrapper[4799]: I0121 17:47:28.777295 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-sbtsn" Jan 21 17:47:29 crc kubenswrapper[4799]: I0121 17:47:29.655602 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-cdw6h" Jan 21 17:47:33 crc kubenswrapper[4799]: I0121 17:47:33.542959 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-22zcw"] Jan 21 17:47:33 crc kubenswrapper[4799]: I0121 17:47:33.545195 4799 util.go:30] "No sandbox for pod can be found. 
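[Note: the machine-config-daemon sequence above shows the liveness-probe contract end to end: an HTTP GET against the configured endpoint fails with "connection refused", the kubelet marks the probe unhealthy, kills the container with its grace period (600s here), and a replacement container starts. The probe itself is essentially a bounded HTTP GET, where 2xx/3xx responses count as success; a sketch matching the endpoint in the log:]

```go
// Shape of an HTTP liveness check: bounded GET; refused connections and
// non-2xx/3xx statuses count as failures. Nothing listens on this port
// here either, so running it prints a failure like the one in the log.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func probe(url string) string {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return fmt.Sprintf("failure: %v", err) // e.g. connect: connection refused
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 200 && resp.StatusCode < 400 {
		return "success"
	}
	return fmt.Sprintf("failure: status %d", resp.StatusCode)
}

func main() {
	fmt.Println(probe("http://127.0.0.1:8798/health"))
}
```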
Jan 21 17:47:33 crc kubenswrapper[4799]: I0121 17:47:33.550347 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt"
Jan 21 17:47:33 crc kubenswrapper[4799]: I0121 17:47:33.550352 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt"
Jan 21 17:47:33 crc kubenswrapper[4799]: I0121 17:47:33.550690 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-qxd42"
Jan 21 17:47:33 crc kubenswrapper[4799]: I0121 17:47:33.561306 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-22zcw"]
Jan 21 17:47:33 crc kubenswrapper[4799]: I0121 17:47:33.737997 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b57lg\" (UniqueName: \"kubernetes.io/projected/fef3455e-f779-404d-bf2c-40703b3b3b05-kube-api-access-b57lg\") pod \"openstack-operator-index-22zcw\" (UID: \"fef3455e-f779-404d-bf2c-40703b3b3b05\") " pod="openstack-operators/openstack-operator-index-22zcw"
Jan 21 17:47:33 crc kubenswrapper[4799]: I0121 17:47:33.840211 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b57lg\" (UniqueName: \"kubernetes.io/projected/fef3455e-f779-404d-bf2c-40703b3b3b05-kube-api-access-b57lg\") pod \"openstack-operator-index-22zcw\" (UID: \"fef3455e-f779-404d-bf2c-40703b3b3b05\") " pod="openstack-operators/openstack-operator-index-22zcw"
Jan 21 17:47:33 crc kubenswrapper[4799]: I0121 17:47:33.863238 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b57lg\" (UniqueName: \"kubernetes.io/projected/fef3455e-f779-404d-bf2c-40703b3b3b05-kube-api-access-b57lg\") pod \"openstack-operator-index-22zcw\" (UID: \"fef3455e-f779-404d-bf2c-40703b3b3b05\") " pod="openstack-operators/openstack-operator-index-22zcw"
Jan 21 17:47:33 crc kubenswrapper[4799]: I0121 17:47:33.867472 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-22zcw"
Jan 21 17:47:34 crc kubenswrapper[4799]: I0121 17:47:34.320344 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-22zcw"]
Jan 21 17:47:34 crc kubenswrapper[4799]: I0121 17:47:34.798212 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-22zcw" event={"ID":"fef3455e-f779-404d-bf2c-40703b3b3b05","Type":"ContainerStarted","Data":"54f8c43f7b57a556d51eb44a22342f0d3a21904e8e1c90844931032244be4d4a"}
Jan 21 17:47:36 crc kubenswrapper[4799]: I0121 17:47:36.830943 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-22zcw" event={"ID":"fef3455e-f779-404d-bf2c-40703b3b3b05","Type":"ContainerStarted","Data":"c6f3df27f34fda800ca2a33c154ed0cfb35d92e7d7cc0a7ef0d727cba1cc360f"}
Jan 21 17:47:36 crc kubenswrapper[4799]: I0121 17:47:36.875326 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-22zcw" podStartSLOduration=2.207830844 podStartE2EDuration="3.875283739s" podCreationTimestamp="2026-01-21 17:47:33 +0000 UTC" firstStartedPulling="2026-01-21 17:47:34.339650921 +0000 UTC m=+880.965940944" lastFinishedPulling="2026-01-21 17:47:36.007103816 +0000 UTC m=+882.633393839" observedRunningTime="2026-01-21 17:47:36.868062155 +0000 UTC m=+883.494352188" watchObservedRunningTime="2026-01-21 17:47:36.875283739 +0000 UTC m=+883.501573752"
Jan 21 17:47:37 crc kubenswrapper[4799]: I0121 17:47:37.501877 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-22zcw"]
Jan 21 17:47:38 crc kubenswrapper[4799]: I0121 17:47:38.115789 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-kwk6b"]
Jan 21 17:47:38 crc kubenswrapper[4799]: I0121 17:47:38.117481 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-kwk6b"
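[Note: every entry in this log is a syslog prefix ("Jan 21 17:47:33 crc kubenswrapper[4799]:") followed by a klog header: severity plus date (I0121/W0121/E0121), wall time, PID, source file:line, then the structured message. A small Go parser for the klog part, handy when slicing logs like this one:]

```go
// Minimal parser for the klog header used throughout this log:
// <severity><MMDD> <HH:MM:SS.micros> <pid> <file:line>] <message>
package main

import (
	"fmt"
	"regexp"
)

var klogRe = regexp.MustCompile(`^([IWE])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+)\s+([\w.]+:\d+)\] (.*)$`)

func main() {
	line := `I0121 17:47:33.542959 4799 kubelet.go:2421] "SyncLoop ADD" source="api"`
	if m := klogRe.FindStringSubmatch(line); m != nil {
		fmt.Printf("severity=%s date=%s time=%s pid=%s site=%s\nmsg=%s\n",
			m[1], m[2], m[3], m[4], m[5], m[6])
	}
}
```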
Need to start a new one" pod="openstack-operators/openstack-operator-index-kwk6b" Jan 21 17:47:38 crc kubenswrapper[4799]: I0121 17:47:38.132236 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-kwk6b"] Jan 21 17:47:38 crc kubenswrapper[4799]: I0121 17:47:38.220173 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqrnz\" (UniqueName: \"kubernetes.io/projected/4ef61afc-f214-4ffd-875e-b8c8dfb2426e-kube-api-access-tqrnz\") pod \"openstack-operator-index-kwk6b\" (UID: \"4ef61afc-f214-4ffd-875e-b8c8dfb2426e\") " pod="openstack-operators/openstack-operator-index-kwk6b" Jan 21 17:47:38 crc kubenswrapper[4799]: I0121 17:47:38.322238 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqrnz\" (UniqueName: \"kubernetes.io/projected/4ef61afc-f214-4ffd-875e-b8c8dfb2426e-kube-api-access-tqrnz\") pod \"openstack-operator-index-kwk6b\" (UID: \"4ef61afc-f214-4ffd-875e-b8c8dfb2426e\") " pod="openstack-operators/openstack-operator-index-kwk6b" Jan 21 17:47:38 crc kubenswrapper[4799]: I0121 17:47:38.345077 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqrnz\" (UniqueName: \"kubernetes.io/projected/4ef61afc-f214-4ffd-875e-b8c8dfb2426e-kube-api-access-tqrnz\") pod \"openstack-operator-index-kwk6b\" (UID: \"4ef61afc-f214-4ffd-875e-b8c8dfb2426e\") " pod="openstack-operators/openstack-operator-index-kwk6b" Jan 21 17:47:38 crc kubenswrapper[4799]: I0121 17:47:38.450776 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-kwk6b" Jan 21 17:47:38 crc kubenswrapper[4799]: I0121 17:47:38.667330 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-74wj8" Jan 21 17:47:38 crc kubenswrapper[4799]: I0121 17:47:38.850534 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-22zcw" podUID="fef3455e-f779-404d-bf2c-40703b3b3b05" containerName="registry-server" containerID="cri-o://c6f3df27f34fda800ca2a33c154ed0cfb35d92e7d7cc0a7ef0d727cba1cc360f" gracePeriod=2 Jan 21 17:47:38 crc kubenswrapper[4799]: I0121 17:47:38.882671 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-kwk6b"] Jan 21 17:47:39 crc kubenswrapper[4799]: I0121 17:47:39.202911 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-22zcw" Jan 21 17:47:39 crc kubenswrapper[4799]: I0121 17:47:39.339826 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b57lg\" (UniqueName: \"kubernetes.io/projected/fef3455e-f779-404d-bf2c-40703b3b3b05-kube-api-access-b57lg\") pod \"fef3455e-f779-404d-bf2c-40703b3b3b05\" (UID: \"fef3455e-f779-404d-bf2c-40703b3b3b05\") " Jan 21 17:47:39 crc kubenswrapper[4799]: I0121 17:47:39.346894 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fef3455e-f779-404d-bf2c-40703b3b3b05-kube-api-access-b57lg" (OuterVolumeSpecName: "kube-api-access-b57lg") pod "fef3455e-f779-404d-bf2c-40703b3b3b05" (UID: "fef3455e-f779-404d-bf2c-40703b3b3b05"). InnerVolumeSpecName "kube-api-access-b57lg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:47:39 crc kubenswrapper[4799]: I0121 17:47:39.443056 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b57lg\" (UniqueName: \"kubernetes.io/projected/fef3455e-f779-404d-bf2c-40703b3b3b05-kube-api-access-b57lg\") on node \"crc\" DevicePath \"\"" Jan 21 17:47:39 crc kubenswrapper[4799]: I0121 17:47:39.862052 4799 generic.go:334] "Generic (PLEG): container finished" podID="fef3455e-f779-404d-bf2c-40703b3b3b05" containerID="c6f3df27f34fda800ca2a33c154ed0cfb35d92e7d7cc0a7ef0d727cba1cc360f" exitCode=0 Jan 21 17:47:39 crc kubenswrapper[4799]: I0121 17:47:39.862175 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-22zcw" event={"ID":"fef3455e-f779-404d-bf2c-40703b3b3b05","Type":"ContainerDied","Data":"c6f3df27f34fda800ca2a33c154ed0cfb35d92e7d7cc0a7ef0d727cba1cc360f"} Jan 21 17:47:39 crc kubenswrapper[4799]: I0121 17:47:39.862305 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-22zcw" event={"ID":"fef3455e-f779-404d-bf2c-40703b3b3b05","Type":"ContainerDied","Data":"54f8c43f7b57a556d51eb44a22342f0d3a21904e8e1c90844931032244be4d4a"} Jan 21 17:47:39 crc kubenswrapper[4799]: I0121 17:47:39.862342 4799 scope.go:117] "RemoveContainer" containerID="c6f3df27f34fda800ca2a33c154ed0cfb35d92e7d7cc0a7ef0d727cba1cc360f" Jan 21 17:47:39 crc kubenswrapper[4799]: I0121 17:47:39.862524 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-22zcw" Jan 21 17:47:39 crc kubenswrapper[4799]: I0121 17:47:39.872455 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-kwk6b" event={"ID":"4ef61afc-f214-4ffd-875e-b8c8dfb2426e","Type":"ContainerStarted","Data":"c3b39a805031636be830e1ade6aeb53952eb5340b83a3bea3af44f88f6b944a8"} Jan 21 17:47:39 crc kubenswrapper[4799]: I0121 17:47:39.872576 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-kwk6b" event={"ID":"4ef61afc-f214-4ffd-875e-b8c8dfb2426e","Type":"ContainerStarted","Data":"5a4664889083380c97b597bd1f71ca548917e778551925ede5cb2b0ded958f4e"} Jan 21 17:47:39 crc kubenswrapper[4799]: I0121 17:47:39.911410 4799 scope.go:117] "RemoveContainer" containerID="c6f3df27f34fda800ca2a33c154ed0cfb35d92e7d7cc0a7ef0d727cba1cc360f" Jan 21 17:47:39 crc kubenswrapper[4799]: E0121 17:47:39.913180 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6f3df27f34fda800ca2a33c154ed0cfb35d92e7d7cc0a7ef0d727cba1cc360f\": container with ID starting with c6f3df27f34fda800ca2a33c154ed0cfb35d92e7d7cc0a7ef0d727cba1cc360f not found: ID does not exist" containerID="c6f3df27f34fda800ca2a33c154ed0cfb35d92e7d7cc0a7ef0d727cba1cc360f" Jan 21 17:47:39 crc kubenswrapper[4799]: I0121 17:47:39.913256 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6f3df27f34fda800ca2a33c154ed0cfb35d92e7d7cc0a7ef0d727cba1cc360f"} err="failed to get container status \"c6f3df27f34fda800ca2a33c154ed0cfb35d92e7d7cc0a7ef0d727cba1cc360f\": rpc error: code = NotFound desc = could not find container \"c6f3df27f34fda800ca2a33c154ed0cfb35d92e7d7cc0a7ef0d727cba1cc360f\": container with ID starting with c6f3df27f34fda800ca2a33c154ed0cfb35d92e7d7cc0a7ef0d727cba1cc360f not found: ID does not exist" Jan 21 17:47:39 crc kubenswrapper[4799]: 
Jan 21 17:47:39 crc kubenswrapper[4799]: I0121 17:47:39.918029 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-kwk6b" podStartSLOduration=1.8596127070000001 podStartE2EDuration="1.918006318s" podCreationTimestamp="2026-01-21 17:47:38 +0000 UTC" firstStartedPulling="2026-01-21 17:47:38.892699312 +0000 UTC m=+885.518989335" lastFinishedPulling="2026-01-21 17:47:38.951092923 +0000 UTC m=+885.577382946" observedRunningTime="2026-01-21 17:47:39.900782841 +0000 UTC m=+886.527072924" watchObservedRunningTime="2026-01-21 17:47:39.918006318 +0000 UTC m=+886.544296351"
Jan 21 17:47:39 crc kubenswrapper[4799]: I0121 17:47:39.927581 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-22zcw"]
Jan 21 17:47:39 crc kubenswrapper[4799]: I0121 17:47:39.944451 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-22zcw"]
Jan 21 17:47:40 crc kubenswrapper[4799]: I0121 17:47:40.215173 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fef3455e-f779-404d-bf2c-40703b3b3b05" path="/var/lib/kubelet/pods/fef3455e-f779-404d-bf2c-40703b3b3b05/volumes"
Jan 21 17:47:48 crc kubenswrapper[4799]: I0121 17:47:48.451694 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-kwk6b"
Jan 21 17:47:48 crc kubenswrapper[4799]: I0121 17:47:48.452668 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-kwk6b"
Jan 21 17:47:48 crc kubenswrapper[4799]: I0121 17:47:48.485316 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-kwk6b"
Jan 21 17:47:48 crc kubenswrapper[4799]: I0121 17:47:48.976902 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-kwk6b"
Jan 21 17:47:54 crc kubenswrapper[4799]: I0121 17:47:54.584057 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns"]
Jan 21 17:47:54 crc kubenswrapper[4799]: E0121 17:47:54.585071 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fef3455e-f779-404d-bf2c-40703b3b3b05" containerName="registry-server"
Jan 21 17:47:54 crc kubenswrapper[4799]: I0121 17:47:54.585099 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fef3455e-f779-404d-bf2c-40703b3b3b05" containerName="registry-server"
Jan 21 17:47:54 crc kubenswrapper[4799]: I0121 17:47:54.585337 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="fef3455e-f779-404d-bf2c-40703b3b3b05" containerName="registry-server"
Jan 21 17:47:54 crc kubenswrapper[4799]: I0121 17:47:54.586368 4799 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" Jan 21 17:47:54 crc kubenswrapper[4799]: I0121 17:47:54.589343 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7cth8" Jan 21 17:47:54 crc kubenswrapper[4799]: I0121 17:47:54.602701 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns"] Jan 21 17:47:54 crc kubenswrapper[4799]: I0121 17:47:54.731728 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ba033264-742d-42f8-b688-5d0f8a853360-util\") pod \"0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns\" (UID: \"ba033264-742d-42f8-b688-5d0f8a853360\") " pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" Jan 21 17:47:54 crc kubenswrapper[4799]: I0121 17:47:54.731804 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ba033264-742d-42f8-b688-5d0f8a853360-bundle\") pod \"0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns\" (UID: \"ba033264-742d-42f8-b688-5d0f8a853360\") " pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" Jan 21 17:47:54 crc kubenswrapper[4799]: I0121 17:47:54.731830 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pswb\" (UniqueName: \"kubernetes.io/projected/ba033264-742d-42f8-b688-5d0f8a853360-kube-api-access-2pswb\") pod \"0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns\" (UID: \"ba033264-742d-42f8-b688-5d0f8a853360\") " pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" Jan 21 17:47:54 crc kubenswrapper[4799]: I0121 17:47:54.833561 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ba033264-742d-42f8-b688-5d0f8a853360-util\") pod \"0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns\" (UID: \"ba033264-742d-42f8-b688-5d0f8a853360\") " pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" Jan 21 17:47:54 crc kubenswrapper[4799]: I0121 17:47:54.833708 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ba033264-742d-42f8-b688-5d0f8a853360-bundle\") pod \"0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns\" (UID: \"ba033264-742d-42f8-b688-5d0f8a853360\") " pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" Jan 21 17:47:54 crc kubenswrapper[4799]: I0121 17:47:54.833768 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pswb\" (UniqueName: \"kubernetes.io/projected/ba033264-742d-42f8-b688-5d0f8a853360-kube-api-access-2pswb\") pod \"0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns\" (UID: \"ba033264-742d-42f8-b688-5d0f8a853360\") " pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" Jan 21 17:47:54 crc kubenswrapper[4799]: I0121 17:47:54.834607 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/ba033264-742d-42f8-b688-5d0f8a853360-util\") pod \"0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns\" (UID: \"ba033264-742d-42f8-b688-5d0f8a853360\") " pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" Jan 21 17:47:54 crc kubenswrapper[4799]: I0121 17:47:54.834665 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ba033264-742d-42f8-b688-5d0f8a853360-bundle\") pod \"0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns\" (UID: \"ba033264-742d-42f8-b688-5d0f8a853360\") " pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" Jan 21 17:47:54 crc kubenswrapper[4799]: I0121 17:47:54.860515 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pswb\" (UniqueName: \"kubernetes.io/projected/ba033264-742d-42f8-b688-5d0f8a853360-kube-api-access-2pswb\") pod \"0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns\" (UID: \"ba033264-742d-42f8-b688-5d0f8a853360\") " pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" Jan 21 17:47:54 crc kubenswrapper[4799]: I0121 17:47:54.908775 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" Jan 21 17:47:55 crc kubenswrapper[4799]: I0121 17:47:55.364285 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns"] Jan 21 17:47:56 crc kubenswrapper[4799]: I0121 17:47:56.028013 4799 generic.go:334] "Generic (PLEG): container finished" podID="ba033264-742d-42f8-b688-5d0f8a853360" containerID="7f8c99fd1e91a3f44290a5b7f10d07f3c9b513d73b85c4487469d8928d9eb5ac" exitCode=0 Jan 21 17:47:56 crc kubenswrapper[4799]: I0121 17:47:56.028229 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" event={"ID":"ba033264-742d-42f8-b688-5d0f8a853360","Type":"ContainerDied","Data":"7f8c99fd1e91a3f44290a5b7f10d07f3c9b513d73b85c4487469d8928d9eb5ac"} Jan 21 17:47:56 crc kubenswrapper[4799]: I0121 17:47:56.028476 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" event={"ID":"ba033264-742d-42f8-b688-5d0f8a853360","Type":"ContainerStarted","Data":"d840d7e45ca21ecbe33a3a03f3b6dedbb00fdc50e5702dc79ccbc486ccb437f0"} Jan 21 17:47:57 crc kubenswrapper[4799]: I0121 17:47:57.042094 4799 generic.go:334] "Generic (PLEG): container finished" podID="ba033264-742d-42f8-b688-5d0f8a853360" containerID="accd9b2db53608894ca8927d4aed23b285a1eecf746745407eb7f84c00823ffa" exitCode=0 Jan 21 17:47:57 crc kubenswrapper[4799]: I0121 17:47:57.042220 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" event={"ID":"ba033264-742d-42f8-b688-5d0f8a853360","Type":"ContainerDied","Data":"accd9b2db53608894ca8927d4aed23b285a1eecf746745407eb7f84c00823ffa"} Jan 21 17:47:58 crc kubenswrapper[4799]: I0121 17:47:58.061761 4799 generic.go:334] "Generic (PLEG): container finished" podID="ba033264-742d-42f8-b688-5d0f8a853360" containerID="1d0ce8cb1a71dd41c1f68c99c19356ac77da1698896b8c62f6be371bb9a4690d" exitCode=0 Jan 21 17:47:58 crc kubenswrapper[4799]: I0121 17:47:58.061980 4799 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" event={"ID":"ba033264-742d-42f8-b688-5d0f8a853360","Type":"ContainerDied","Data":"1d0ce8cb1a71dd41c1f68c99c19356ac77da1698896b8c62f6be371bb9a4690d"} Jan 21 17:47:59 crc kubenswrapper[4799]: I0121 17:47:59.363111 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" Jan 21 17:47:59 crc kubenswrapper[4799]: I0121 17:47:59.521306 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2pswb\" (UniqueName: \"kubernetes.io/projected/ba033264-742d-42f8-b688-5d0f8a853360-kube-api-access-2pswb\") pod \"ba033264-742d-42f8-b688-5d0f8a853360\" (UID: \"ba033264-742d-42f8-b688-5d0f8a853360\") " Jan 21 17:47:59 crc kubenswrapper[4799]: I0121 17:47:59.521469 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ba033264-742d-42f8-b688-5d0f8a853360-bundle\") pod \"ba033264-742d-42f8-b688-5d0f8a853360\" (UID: \"ba033264-742d-42f8-b688-5d0f8a853360\") " Jan 21 17:47:59 crc kubenswrapper[4799]: I0121 17:47:59.521536 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ba033264-742d-42f8-b688-5d0f8a853360-util\") pod \"ba033264-742d-42f8-b688-5d0f8a853360\" (UID: \"ba033264-742d-42f8-b688-5d0f8a853360\") " Jan 21 17:47:59 crc kubenswrapper[4799]: I0121 17:47:59.522645 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba033264-742d-42f8-b688-5d0f8a853360-bundle" (OuterVolumeSpecName: "bundle") pod "ba033264-742d-42f8-b688-5d0f8a853360" (UID: "ba033264-742d-42f8-b688-5d0f8a853360"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:47:59 crc kubenswrapper[4799]: I0121 17:47:59.534692 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba033264-742d-42f8-b688-5d0f8a853360-kube-api-access-2pswb" (OuterVolumeSpecName: "kube-api-access-2pswb") pod "ba033264-742d-42f8-b688-5d0f8a853360" (UID: "ba033264-742d-42f8-b688-5d0f8a853360"). InnerVolumeSpecName "kube-api-access-2pswb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:47:59 crc kubenswrapper[4799]: I0121 17:47:59.558097 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba033264-742d-42f8-b688-5d0f8a853360-util" (OuterVolumeSpecName: "util") pod "ba033264-742d-42f8-b688-5d0f8a853360" (UID: "ba033264-742d-42f8-b688-5d0f8a853360"). InnerVolumeSpecName "util". 
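The unmount sequence above follows a fixed order per volume: an "UnmountVolume started" record, then "UnmountVolume.TearDown succeeded", and only afterwards a "Volume detached" record once the reconciler has updated its view of actual state. A sketch of that ordering (hypothetical types; the real reconciler works from desired- and actual-state caches rather than a slice):

package main

import "fmt"

type volume struct{ name string }

// reconcileUnmounts mirrors the ordering visible in the entries above: a
// volume is reported detached only after its TearDown has succeeded; a
// failed TearDown leaves it in place for a later reconcile pass to retry.
func reconcileUnmounts(mounted []volume, tearDown func(volume) error) []string {
	var detached []string
	for _, v := range mounted {
		fmt.Printf("UnmountVolume started for volume %q\n", v.name)
		if err := tearDown(v); err != nil {
			fmt.Printf("TearDown failed for %q: %v (will retry)\n", v.name, err)
			continue
		}
		fmt.Printf("TearDown succeeded for %q\n", v.name)
		detached = append(detached, v.name)
	}
	return detached
}

func main() {
	vols := []volume{{"kube-api-access-2pswb"}, {"bundle"}, {"util"}}
	fmt.Println("Volume detached:", reconcileUnmounts(vols, func(volume) error { return nil }))
}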
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:47:59 crc kubenswrapper[4799]: I0121 17:47:59.624243 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pswb\" (UniqueName: \"kubernetes.io/projected/ba033264-742d-42f8-b688-5d0f8a853360-kube-api-access-2pswb\") on node \"crc\" DevicePath \"\"" Jan 21 17:47:59 crc kubenswrapper[4799]: I0121 17:47:59.624302 4799 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ba033264-742d-42f8-b688-5d0f8a853360-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:47:59 crc kubenswrapper[4799]: I0121 17:47:59.624315 4799 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ba033264-742d-42f8-b688-5d0f8a853360-util\") on node \"crc\" DevicePath \"\"" Jan 21 17:48:00 crc kubenswrapper[4799]: I0121 17:48:00.084197 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" event={"ID":"ba033264-742d-42f8-b688-5d0f8a853360","Type":"ContainerDied","Data":"d840d7e45ca21ecbe33a3a03f3b6dedbb00fdc50e5702dc79ccbc486ccb437f0"} Jan 21 17:48:00 crc kubenswrapper[4799]: I0121 17:48:00.084249 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d840d7e45ca21ecbe33a3a03f3b6dedbb00fdc50e5702dc79ccbc486ccb437f0" Jan 21 17:48:00 crc kubenswrapper[4799]: I0121 17:48:00.084290 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns" Jan 21 17:48:02 crc kubenswrapper[4799]: I0121 17:48:02.238352 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-6664d49b67-ncnqt"] Jan 21 17:48:02 crc kubenswrapper[4799]: E0121 17:48:02.239224 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba033264-742d-42f8-b688-5d0f8a853360" containerName="util" Jan 21 17:48:02 crc kubenswrapper[4799]: I0121 17:48:02.239245 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba033264-742d-42f8-b688-5d0f8a853360" containerName="util" Jan 21 17:48:02 crc kubenswrapper[4799]: E0121 17:48:02.239275 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba033264-742d-42f8-b688-5d0f8a853360" containerName="pull" Jan 21 17:48:02 crc kubenswrapper[4799]: I0121 17:48:02.239282 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba033264-742d-42f8-b688-5d0f8a853360" containerName="pull" Jan 21 17:48:02 crc kubenswrapper[4799]: E0121 17:48:02.239309 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba033264-742d-42f8-b688-5d0f8a853360" containerName="extract" Jan 21 17:48:02 crc kubenswrapper[4799]: I0121 17:48:02.239317 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba033264-742d-42f8-b688-5d0f8a853360" containerName="extract" Jan 21 17:48:02 crc kubenswrapper[4799]: I0121 17:48:02.239460 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba033264-742d-42f8-b688-5d0f8a853360" containerName="extract" Jan 21 17:48:02 crc kubenswrapper[4799]: I0121 17:48:02.240259 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-6664d49b67-ncnqt" Jan 21 17:48:02 crc kubenswrapper[4799]: I0121 17:48:02.242670 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-pf2z2" Jan 21 17:48:02 crc kubenswrapper[4799]: I0121 17:48:02.299797 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nzfm\" (UniqueName: \"kubernetes.io/projected/35ba5cb8-8f17-4e8d-bd30-28912c4fbe0d-kube-api-access-7nzfm\") pod \"openstack-operator-controller-init-6664d49b67-ncnqt\" (UID: \"35ba5cb8-8f17-4e8d-bd30-28912c4fbe0d\") " pod="openstack-operators/openstack-operator-controller-init-6664d49b67-ncnqt" Jan 21 17:48:02 crc kubenswrapper[4799]: I0121 17:48:02.328658 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-6664d49b67-ncnqt"] Jan 21 17:48:02 crc kubenswrapper[4799]: I0121 17:48:02.401363 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nzfm\" (UniqueName: \"kubernetes.io/projected/35ba5cb8-8f17-4e8d-bd30-28912c4fbe0d-kube-api-access-7nzfm\") pod \"openstack-operator-controller-init-6664d49b67-ncnqt\" (UID: \"35ba5cb8-8f17-4e8d-bd30-28912c4fbe0d\") " pod="openstack-operators/openstack-operator-controller-init-6664d49b67-ncnqt" Jan 21 17:48:02 crc kubenswrapper[4799]: I0121 17:48:02.417215 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nzfm\" (UniqueName: \"kubernetes.io/projected/35ba5cb8-8f17-4e8d-bd30-28912c4fbe0d-kube-api-access-7nzfm\") pod \"openstack-operator-controller-init-6664d49b67-ncnqt\" (UID: \"35ba5cb8-8f17-4e8d-bd30-28912c4fbe0d\") " pod="openstack-operators/openstack-operator-controller-init-6664d49b67-ncnqt" Jan 21 17:48:02 crc kubenswrapper[4799]: I0121 17:48:02.613066 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-6664d49b67-ncnqt" Jan 21 17:48:02 crc kubenswrapper[4799]: I0121 17:48:02.847816 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-6664d49b67-ncnqt"] Jan 21 17:48:02 crc kubenswrapper[4799]: I0121 17:48:02.856789 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 17:48:03 crc kubenswrapper[4799]: I0121 17:48:03.131678 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-6664d49b67-ncnqt" event={"ID":"35ba5cb8-8f17-4e8d-bd30-28912c4fbe0d","Type":"ContainerStarted","Data":"9899c077d051750caaa110f362578cba80c71dd9460b3b80c5da8090ab76230f"} Jan 21 17:48:08 crc kubenswrapper[4799]: I0121 17:48:08.201399 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-6664d49b67-ncnqt" event={"ID":"35ba5cb8-8f17-4e8d-bd30-28912c4fbe0d","Type":"ContainerStarted","Data":"011673bfce0dc9f51151c1e49822604248f69636899d920ae46a1db71aff6dc3"} Jan 21 17:48:08 crc kubenswrapper[4799]: I0121 17:48:08.203353 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-6664d49b67-ncnqt" Jan 21 17:48:08 crc kubenswrapper[4799]: I0121 17:48:08.239052 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-6664d49b67-ncnqt" podStartSLOduration=1.656994744 podStartE2EDuration="6.239017601s" podCreationTimestamp="2026-01-21 17:48:02 +0000 UTC" firstStartedPulling="2026-01-21 17:48:02.85641496 +0000 UTC m=+909.482704983" lastFinishedPulling="2026-01-21 17:48:07.438437817 +0000 UTC m=+914.064727840" observedRunningTime="2026-01-21 17:48:08.233429083 +0000 UTC m=+914.859719106" watchObservedRunningTime="2026-01-21 17:48:08.239017601 +0000 UTC m=+914.865307624" Jan 21 17:48:12 crc kubenswrapper[4799]: I0121 17:48:12.616483 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-6664d49b67-ncnqt" Jan 21 17:48:23 crc kubenswrapper[4799]: I0121 17:48:23.098935 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tqgcv"] Jan 21 17:48:23 crc kubenswrapper[4799]: I0121 17:48:23.105794 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tqgcv" Jan 21 17:48:23 crc kubenswrapper[4799]: I0121 17:48:23.109491 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tqgcv"] Jan 21 17:48:23 crc kubenswrapper[4799]: I0121 17:48:23.129531 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f25be100-1ae0-4419-b2b3-82140d80878e-utilities\") pod \"redhat-marketplace-tqgcv\" (UID: \"f25be100-1ae0-4419-b2b3-82140d80878e\") " pod="openshift-marketplace/redhat-marketplace-tqgcv" Jan 21 17:48:23 crc kubenswrapper[4799]: I0121 17:48:23.129573 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f25be100-1ae0-4419-b2b3-82140d80878e-catalog-content\") pod \"redhat-marketplace-tqgcv\" (UID: \"f25be100-1ae0-4419-b2b3-82140d80878e\") " pod="openshift-marketplace/redhat-marketplace-tqgcv" Jan 21 17:48:23 crc kubenswrapper[4799]: I0121 17:48:23.129607 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fb7v\" (UniqueName: \"kubernetes.io/projected/f25be100-1ae0-4419-b2b3-82140d80878e-kube-api-access-2fb7v\") pod \"redhat-marketplace-tqgcv\" (UID: \"f25be100-1ae0-4419-b2b3-82140d80878e\") " pod="openshift-marketplace/redhat-marketplace-tqgcv" Jan 21 17:48:23 crc kubenswrapper[4799]: I0121 17:48:23.231758 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f25be100-1ae0-4419-b2b3-82140d80878e-utilities\") pod \"redhat-marketplace-tqgcv\" (UID: \"f25be100-1ae0-4419-b2b3-82140d80878e\") " pod="openshift-marketplace/redhat-marketplace-tqgcv" Jan 21 17:48:23 crc kubenswrapper[4799]: I0121 17:48:23.232153 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f25be100-1ae0-4419-b2b3-82140d80878e-catalog-content\") pod \"redhat-marketplace-tqgcv\" (UID: \"f25be100-1ae0-4419-b2b3-82140d80878e\") " pod="openshift-marketplace/redhat-marketplace-tqgcv" Jan 21 17:48:23 crc kubenswrapper[4799]: I0121 17:48:23.232262 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fb7v\" (UniqueName: \"kubernetes.io/projected/f25be100-1ae0-4419-b2b3-82140d80878e-kube-api-access-2fb7v\") pod \"redhat-marketplace-tqgcv\" (UID: \"f25be100-1ae0-4419-b2b3-82140d80878e\") " pod="openshift-marketplace/redhat-marketplace-tqgcv" Jan 21 17:48:23 crc kubenswrapper[4799]: I0121 17:48:23.232449 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f25be100-1ae0-4419-b2b3-82140d80878e-utilities\") pod \"redhat-marketplace-tqgcv\" (UID: \"f25be100-1ae0-4419-b2b3-82140d80878e\") " pod="openshift-marketplace/redhat-marketplace-tqgcv" Jan 21 17:48:23 crc kubenswrapper[4799]: I0121 17:48:23.233684 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f25be100-1ae0-4419-b2b3-82140d80878e-catalog-content\") pod \"redhat-marketplace-tqgcv\" (UID: \"f25be100-1ae0-4419-b2b3-82140d80878e\") " pod="openshift-marketplace/redhat-marketplace-tqgcv" Jan 21 17:48:23 crc kubenswrapper[4799]: I0121 17:48:23.263141 4799 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-2fb7v\" (UniqueName: \"kubernetes.io/projected/f25be100-1ae0-4419-b2b3-82140d80878e-kube-api-access-2fb7v\") pod \"redhat-marketplace-tqgcv\" (UID: \"f25be100-1ae0-4419-b2b3-82140d80878e\") " pod="openshift-marketplace/redhat-marketplace-tqgcv" Jan 21 17:48:23 crc kubenswrapper[4799]: I0121 17:48:23.434038 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tqgcv" Jan 21 17:48:23 crc kubenswrapper[4799]: I0121 17:48:23.708237 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tqgcv"] Jan 21 17:48:24 crc kubenswrapper[4799]: I0121 17:48:24.470890 4799 generic.go:334] "Generic (PLEG): container finished" podID="f25be100-1ae0-4419-b2b3-82140d80878e" containerID="0b432ae396fd79d60dcd30058b8a1191084e0efd696f0ad6245c6cd4757b0d68" exitCode=0 Jan 21 17:48:24 crc kubenswrapper[4799]: I0121 17:48:24.471020 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tqgcv" event={"ID":"f25be100-1ae0-4419-b2b3-82140d80878e","Type":"ContainerDied","Data":"0b432ae396fd79d60dcd30058b8a1191084e0efd696f0ad6245c6cd4757b0d68"} Jan 21 17:48:24 crc kubenswrapper[4799]: I0121 17:48:24.471270 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tqgcv" event={"ID":"f25be100-1ae0-4419-b2b3-82140d80878e","Type":"ContainerStarted","Data":"1f3c996e313210cd6a5ed5cace8546b4d3e365e96473c14e4b5f7c01f04543ed"} Jan 21 17:48:26 crc kubenswrapper[4799]: I0121 17:48:26.490660 4799 generic.go:334] "Generic (PLEG): container finished" podID="f25be100-1ae0-4419-b2b3-82140d80878e" containerID="ab6081a8e3ab614f03e12c4303a600a0a4ed8f5dec7ef89ee65720dfdf0c9f19" exitCode=0 Jan 21 17:48:26 crc kubenswrapper[4799]: I0121 17:48:26.490751 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tqgcv" event={"ID":"f25be100-1ae0-4419-b2b3-82140d80878e","Type":"ContainerDied","Data":"ab6081a8e3ab614f03e12c4303a600a0a4ed8f5dec7ef89ee65720dfdf0c9f19"} Jan 21 17:48:27 crc kubenswrapper[4799]: I0121 17:48:27.503229 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tqgcv" event={"ID":"f25be100-1ae0-4419-b2b3-82140d80878e","Type":"ContainerStarted","Data":"21841337815daf48dac3ccc0af7aa0d6664ff6a4b249092dc75dcba9aa7330c5"} Jan 21 17:48:27 crc kubenswrapper[4799]: I0121 17:48:27.538586 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tqgcv" podStartSLOduration=2.041826548 podStartE2EDuration="4.538538701s" podCreationTimestamp="2026-01-21 17:48:23 +0000 UTC" firstStartedPulling="2026-01-21 17:48:24.472849884 +0000 UTC m=+931.099139907" lastFinishedPulling="2026-01-21 17:48:26.969562037 +0000 UTC m=+933.595852060" observedRunningTime="2026-01-21 17:48:27.537245775 +0000 UTC m=+934.163535798" watchObservedRunningTime="2026-01-21 17:48:27.538538701 +0000 UTC m=+934.164828714" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.049054 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-djnlm"] Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.051746 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.068601 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-djnlm"] Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.206824 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e0327f8-1c8b-43c2-b462-1c60671d91d5-catalog-content\") pod \"certified-operators-djnlm\" (UID: \"4e0327f8-1c8b-43c2-b462-1c60671d91d5\") " pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.207057 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e0327f8-1c8b-43c2-b462-1c60671d91d5-utilities\") pod \"certified-operators-djnlm\" (UID: \"4e0327f8-1c8b-43c2-b462-1c60671d91d5\") " pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.207160 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rqpg\" (UniqueName: \"kubernetes.io/projected/4e0327f8-1c8b-43c2-b462-1c60671d91d5-kube-api-access-6rqpg\") pod \"certified-operators-djnlm\" (UID: \"4e0327f8-1c8b-43c2-b462-1c60671d91d5\") " pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.308207 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e0327f8-1c8b-43c2-b462-1c60671d91d5-utilities\") pod \"certified-operators-djnlm\" (UID: \"4e0327f8-1c8b-43c2-b462-1c60671d91d5\") " pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.308267 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rqpg\" (UniqueName: \"kubernetes.io/projected/4e0327f8-1c8b-43c2-b462-1c60671d91d5-kube-api-access-6rqpg\") pod \"certified-operators-djnlm\" (UID: \"4e0327f8-1c8b-43c2-b462-1c60671d91d5\") " pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.308340 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e0327f8-1c8b-43c2-b462-1c60671d91d5-catalog-content\") pod \"certified-operators-djnlm\" (UID: \"4e0327f8-1c8b-43c2-b462-1c60671d91d5\") " pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.308810 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e0327f8-1c8b-43c2-b462-1c60671d91d5-catalog-content\") pod \"certified-operators-djnlm\" (UID: \"4e0327f8-1c8b-43c2-b462-1c60671d91d5\") " pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.309043 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e0327f8-1c8b-43c2-b462-1c60671d91d5-utilities\") pod \"certified-operators-djnlm\" (UID: \"4e0327f8-1c8b-43c2-b462-1c60671d91d5\") " pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.341726 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6rqpg\" (UniqueName: \"kubernetes.io/projected/4e0327f8-1c8b-43c2-b462-1c60671d91d5-kube-api-access-6rqpg\") pod \"certified-operators-djnlm\" (UID: \"4e0327f8-1c8b-43c2-b462-1c60671d91d5\") " pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.382167 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.779447 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7ddb5c749-xgqnz"] Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.782390 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-xgqnz" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.795046 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-bdzfv" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.810052 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-9b68f5989-6gbp4"] Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.811577 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-6gbp4" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.819655 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-s577l" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.824503 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7ddb5c749-xgqnz"] Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.831534 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ccl8s\" (UniqueName: \"kubernetes.io/projected/aa887ea8-0375-49c1-b802-9b3c8468fa87-kube-api-access-ccl8s\") pod \"cinder-operator-controller-manager-9b68f5989-6gbp4\" (UID: \"aa887ea8-0375-49c1-b802-9b3c8468fa87\") " pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-6gbp4" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.833364 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8sd6\" (UniqueName: \"kubernetes.io/projected/99a0338e-5d7f-47cd-a30f-8c57ab921724-kube-api-access-h8sd6\") pod \"barbican-operator-controller-manager-7ddb5c749-xgqnz\" (UID: \"99a0338e-5d7f-47cd-a30f-8c57ab921724\") " pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-xgqnz" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.842283 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-9f958b845-2qd6s"] Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.843722 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-9f958b845-2qd6s" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.848964 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-dpt5n" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.862413 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-9b68f5989-6gbp4"] Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.902763 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-c6994669c-msz6d"] Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.904508 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-c6994669c-msz6d" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.915557 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-47vpf" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.918704 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-9f958b845-2qd6s"] Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.935019 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-c6994669c-msz6d"] Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.946958 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8sd6\" (UniqueName: \"kubernetes.io/projected/99a0338e-5d7f-47cd-a30f-8c57ab921724-kube-api-access-h8sd6\") pod \"barbican-operator-controller-manager-7ddb5c749-xgqnz\" (UID: \"99a0338e-5d7f-47cd-a30f-8c57ab921724\") " pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-xgqnz" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.947196 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2jxd\" (UniqueName: \"kubernetes.io/projected/0e8e19fd-c988-48ce-9150-1b46974bd86e-kube-api-access-f2jxd\") pod \"designate-operator-controller-manager-9f958b845-2qd6s\" (UID: \"0e8e19fd-c988-48ce-9150-1b46974bd86e\") " pod="openstack-operators/designate-operator-controller-manager-9f958b845-2qd6s" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.947334 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgvvt\" (UniqueName: \"kubernetes.io/projected/10ffe97a-fa49-481f-9e79-55627ab24692-kube-api-access-mgvvt\") pod \"glance-operator-controller-manager-c6994669c-msz6d\" (UID: \"10ffe97a-fa49-481f-9e79-55627ab24692\") " pod="openstack-operators/glance-operator-controller-manager-c6994669c-msz6d" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.947451 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ccl8s\" (UniqueName: \"kubernetes.io/projected/aa887ea8-0375-49c1-b802-9b3c8468fa87-kube-api-access-ccl8s\") pod \"cinder-operator-controller-manager-9b68f5989-6gbp4\" (UID: \"aa887ea8-0375-49c1-b802-9b3c8468fa87\") " pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-6gbp4" Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.966203 4799 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-tslfv"] Jan 21 17:48:32 crc kubenswrapper[4799]: I0121 17:48:32.973152 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-tslfv" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.000477 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-m8r8c" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.004182 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8sd6\" (UniqueName: \"kubernetes.io/projected/99a0338e-5d7f-47cd-a30f-8c57ab921724-kube-api-access-h8sd6\") pod \"barbican-operator-controller-manager-7ddb5c749-xgqnz\" (UID: \"99a0338e-5d7f-47cd-a30f-8c57ab921724\") " pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-xgqnz" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.013569 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ccl8s\" (UniqueName: \"kubernetes.io/projected/aa887ea8-0375-49c1-b802-9b3c8468fa87-kube-api-access-ccl8s\") pod \"cinder-operator-controller-manager-9b68f5989-6gbp4\" (UID: \"aa887ea8-0375-49c1-b802-9b3c8468fa87\") " pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-6gbp4" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.039468 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-tslfv"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.053997 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ff7f6"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.053988 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkn2x\" (UniqueName: \"kubernetes.io/projected/70fddebf-b616-47bd-a139-d2a4999624dd-kube-api-access-fkn2x\") pod \"heat-operator-controller-manager-594c8c9d5d-tslfv\" (UID: \"70fddebf-b616-47bd-a139-d2a4999624dd\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-tslfv" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.054322 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2jxd\" (UniqueName: \"kubernetes.io/projected/0e8e19fd-c988-48ce-9150-1b46974bd86e-kube-api-access-f2jxd\") pod \"designate-operator-controller-manager-9f958b845-2qd6s\" (UID: \"0e8e19fd-c988-48ce-9150-1b46974bd86e\") " pod="openstack-operators/designate-operator-controller-manager-9f958b845-2qd6s" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.054375 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgvvt\" (UniqueName: \"kubernetes.io/projected/10ffe97a-fa49-481f-9e79-55627ab24692-kube-api-access-mgvvt\") pod \"glance-operator-controller-manager-c6994669c-msz6d\" (UID: \"10ffe97a-fa49-481f-9e79-55627ab24692\") " pod="openstack-operators/glance-operator-controller-manager-c6994669c-msz6d" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.055589 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ff7f6" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.062362 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-fvsps" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.086814 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2jxd\" (UniqueName: \"kubernetes.io/projected/0e8e19fd-c988-48ce-9150-1b46974bd86e-kube-api-access-f2jxd\") pod \"designate-operator-controller-manager-9f958b845-2qd6s\" (UID: \"0e8e19fd-c988-48ce-9150-1b46974bd86e\") " pod="openstack-operators/designate-operator-controller-manager-9f958b845-2qd6s" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.106784 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.108024 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.112644 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-6bh8l" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.112867 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.115049 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgvvt\" (UniqueName: \"kubernetes.io/projected/10ffe97a-fa49-481f-9e79-55627ab24692-kube-api-access-mgvvt\") pod \"glance-operator-controller-manager-c6994669c-msz6d\" (UID: \"10ffe97a-fa49-481f-9e79-55627ab24692\") " pod="openstack-operators/glance-operator-controller-manager-c6994669c-msz6d" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.129382 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-xgqnz" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.131234 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ff7f6"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.154821 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcg56\" (UniqueName: \"kubernetes.io/projected/ac9f205a-3d30-4ca3-b253-32c441466211-kube-api-access-bcg56\") pod \"horizon-operator-controller-manager-77d5c5b54f-ff7f6\" (UID: \"ac9f205a-3d30-4ca3-b253-32c441466211\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ff7f6" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.154885 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkn2x\" (UniqueName: \"kubernetes.io/projected/70fddebf-b616-47bd-a139-d2a4999624dd-kube-api-access-fkn2x\") pod \"heat-operator-controller-manager-594c8c9d5d-tslfv\" (UID: \"70fddebf-b616-47bd-a139-d2a4999624dd\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-tslfv" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.155011 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmvkt\" (UniqueName: \"kubernetes.io/projected/7654ac1c-746c-46e6-b276-e9f6a839a187-kube-api-access-vmvkt\") pod \"infra-operator-controller-manager-77c48c7859-ffgnr\" (UID: \"7654ac1c-746c-46e6-b276-e9f6a839a187\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.155049 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert\") pod \"infra-operator-controller-manager-77c48c7859-ffgnr\" (UID: \"7654ac1c-746c-46e6-b276-e9f6a839a187\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.166417 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-78757b4889-75r9k"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.168073 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-75r9k" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.178659 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-9f958b845-2qd6s" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.180838 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-gnzrq" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.183617 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-6gbp4" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.197209 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.226922 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkn2x\" (UniqueName: \"kubernetes.io/projected/70fddebf-b616-47bd-a139-d2a4999624dd-kube-api-access-fkn2x\") pod \"heat-operator-controller-manager-594c8c9d5d-tslfv\" (UID: \"70fddebf-b616-47bd-a139-d2a4999624dd\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-tslfv" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.227421 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-c6994669c-msz6d" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.256200 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-78757b4889-75r9k"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.258387 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcg56\" (UniqueName: \"kubernetes.io/projected/ac9f205a-3d30-4ca3-b253-32c441466211-kube-api-access-bcg56\") pod \"horizon-operator-controller-manager-77d5c5b54f-ff7f6\" (UID: \"ac9f205a-3d30-4ca3-b253-32c441466211\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ff7f6" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.258591 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmvkt\" (UniqueName: \"kubernetes.io/projected/7654ac1c-746c-46e6-b276-e9f6a839a187-kube-api-access-vmvkt\") pod \"infra-operator-controller-manager-77c48c7859-ffgnr\" (UID: \"7654ac1c-746c-46e6-b276-e9f6a839a187\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.258630 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert\") pod \"infra-operator-controller-manager-77c48c7859-ffgnr\" (UID: \"7654ac1c-746c-46e6-b276-e9f6a839a187\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" Jan 21 17:48:33 crc kubenswrapper[4799]: E0121 17:48:33.258790 4799 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 21 17:48:33 crc kubenswrapper[4799]: E0121 17:48:33.258908 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert podName:7654ac1c-746c-46e6-b276-e9f6a839a187 nodeName:}" failed. No retries permitted until 2026-01-21 17:48:33.7588487 +0000 UTC m=+940.385138723 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert") pod "infra-operator-controller-manager-77c48c7859-ffgnr" (UID: "7654ac1c-746c-46e6-b276-e9f6a839a187") : secret "infra-operator-webhook-server-cert" not found
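The failure above is the usual pattern for a pod created before its webhook certificate exists: MountVolume.SetUp for "cert" fails because the secret is not there yet, and nestedpendingoperations schedules the retry with "No retries permitted until ..." rather than spinning. A sketch of such a retry gate (the 500ms initial delay is read off the log; the exponential doubling and the cap are assumptions for illustration):

package main

import (
	"fmt"
	"time"
)

// retryGate blocks re-attempts of a failed operation until a per-operation
// backoff window has elapsed, growing the window on each failure.
type retryGate struct {
	lastError time.Time
	delay     time.Duration
}

func (g *retryGate) fail(now time.Time) {
	if g.delay == 0 {
		g.delay = 500 * time.Millisecond // initial durationBeforeRetry, as logged
	} else if g.delay < 2*time.Minute {
		g.delay *= 2 // assumed growth factor and cap
	}
	g.lastError = now
}

func (g *retryGate) allowed(now time.Time) bool {
	return now.After(g.lastError.Add(g.delay))
}

func main() {
	var g retryGate
	now := time.Now()
	g.fail(now)
	fmt.Println(g.allowed(now.Add(200 * time.Millisecond))) // false: still inside the window
	fmt.Println(g.allowed(now.Add(600 * time.Millisecond))) // true: retry permitted
}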
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ff7f6" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.391478 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-767fdc4f47-rw2zn"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.419541 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-864f6b75bf-4ttvx"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.430108 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-bz4tf"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.431639 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bz4tf" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.435949 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tqgcv" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.437644 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tqgcv" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.437764 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-v2lbn" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.449627 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-bz4tf"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.462993 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqpnj\" (UniqueName: \"kubernetes.io/projected/3a0e1cc6-500f-4493-8a18-0eeea206a4f7-kube-api-access-tqpnj\") pod \"ironic-operator-controller-manager-78757b4889-75r9k\" (UID: \"3a0e1cc6-500f-4493-8a18-0eeea206a4f7\") " pod="openstack-operators/ironic-operator-controller-manager-78757b4889-75r9k" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.463085 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqr76\" (UniqueName: \"kubernetes.io/projected/f90c4327-642d-4efd-90d3-7d3b83dbcfc9-kube-api-access-nqr76\") pod \"manila-operator-controller-manager-864f6b75bf-4ttvx\" (UID: \"f90c4327-642d-4efd-90d3-7d3b83dbcfc9\") " pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4ttvx" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.463168 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8t2p\" (UniqueName: \"kubernetes.io/projected/223724ab-b9ee-4f55-b1ab-bf730a6314f9-kube-api-access-b8t2p\") pod \"keystone-operator-controller-manager-767fdc4f47-rw2zn\" (UID: \"223724ab-b9ee-4f55-b1ab-bf730a6314f9\") " pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-rw2zn" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.478339 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-cb4666565-rwglk"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.479625 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-rwglk" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.488083 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-c9dq7" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.520650 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqpnj\" (UniqueName: \"kubernetes.io/projected/3a0e1cc6-500f-4493-8a18-0eeea206a4f7-kube-api-access-tqpnj\") pod \"ironic-operator-controller-manager-78757b4889-75r9k\" (UID: \"3a0e1cc6-500f-4493-8a18-0eeea206a4f7\") " pod="openstack-operators/ironic-operator-controller-manager-78757b4889-75r9k" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.533611 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tqgcv" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.574303 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-djnlm"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.581306 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8t2p\" (UniqueName: \"kubernetes.io/projected/223724ab-b9ee-4f55-b1ab-bf730a6314f9-kube-api-access-b8t2p\") pod \"keystone-operator-controller-manager-767fdc4f47-rw2zn\" (UID: \"223724ab-b9ee-4f55-b1ab-bf730a6314f9\") " pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-rw2zn" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.581402 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njxzg\" (UniqueName: \"kubernetes.io/projected/b35d565f-4d9f-437a-add9-8ef40d891e99-kube-api-access-njxzg\") pod \"mariadb-operator-controller-manager-c87fff755-bz4tf\" (UID: \"b35d565f-4d9f-437a-add9-8ef40d891e99\") " pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bz4tf" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.581443 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6pn7\" (UniqueName: \"kubernetes.io/projected/ed464d3c-bdd7-4b19-a332-402ddeccb65b-kube-api-access-j6pn7\") pod \"neutron-operator-controller-manager-cb4666565-rwglk\" (UID: \"ed464d3c-bdd7-4b19-a332-402ddeccb65b\") " pod="openstack-operators/neutron-operator-controller-manager-cb4666565-rwglk" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.581599 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqr76\" (UniqueName: \"kubernetes.io/projected/f90c4327-642d-4efd-90d3-7d3b83dbcfc9-kube-api-access-nqr76\") pod \"manila-operator-controller-manager-864f6b75bf-4ttvx\" (UID: \"f90c4327-642d-4efd-90d3-7d3b83dbcfc9\") " pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4ttvx" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.596769 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-65849867d6-v552f"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.597802 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-65849867d6-v552f" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.602412 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-5cxvc" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.608178 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9wst2"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.609404 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9wst2" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.617967 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-qqcbp" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.622345 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-cb4666565-rwglk"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.627285 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqr76\" (UniqueName: \"kubernetes.io/projected/f90c4327-642d-4efd-90d3-7d3b83dbcfc9-kube-api-access-nqr76\") pod \"manila-operator-controller-manager-864f6b75bf-4ttvx\" (UID: \"f90c4327-642d-4efd-90d3-7d3b83dbcfc9\") " pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4ttvx" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.631712 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8t2p\" (UniqueName: \"kubernetes.io/projected/223724ab-b9ee-4f55-b1ab-bf730a6314f9-kube-api-access-b8t2p\") pod \"keystone-operator-controller-manager-767fdc4f47-rw2zn\" (UID: \"223724ab-b9ee-4f55-b1ab-bf730a6314f9\") " pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-rw2zn" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.632652 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-65849867d6-v552f"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.643967 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-rw2zn" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.656224 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9wst2"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.661761 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djnlm" event={"ID":"4e0327f8-1c8b-43c2-b462-1c60671d91d5","Type":"ContainerStarted","Data":"5dc21a9dc23f6f4a3e37825a560c71b48eeb755c26fb30a0a76142d13b757c98"} Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.663552 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.664446 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.669940 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.670679 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-prhqz" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.700112 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njxzg\" (UniqueName: \"kubernetes.io/projected/b35d565f-4d9f-437a-add9-8ef40d891e99-kube-api-access-njxzg\") pod \"mariadb-operator-controller-manager-c87fff755-bz4tf\" (UID: \"b35d565f-4d9f-437a-add9-8ef40d891e99\") " pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bz4tf" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.700467 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6pn7\" (UniqueName: \"kubernetes.io/projected/ed464d3c-bdd7-4b19-a332-402ddeccb65b-kube-api-access-j6pn7\") pod \"neutron-operator-controller-manager-cb4666565-rwglk\" (UID: \"ed464d3c-bdd7-4b19-a332-402ddeccb65b\") " pod="openstack-operators/neutron-operator-controller-manager-cb4666565-rwglk" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.713276 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-7nxnh"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.714455 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-7nxnh" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.722681 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-ch66f" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.748200 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6pn7\" (UniqueName: \"kubernetes.io/projected/ed464d3c-bdd7-4b19-a332-402ddeccb65b-kube-api-access-j6pn7\") pod \"neutron-operator-controller-manager-cb4666565-rwglk\" (UID: \"ed464d3c-bdd7-4b19-a332-402ddeccb65b\") " pod="openstack-operators/neutron-operator-controller-manager-cb4666565-rwglk" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.749749 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njxzg\" (UniqueName: \"kubernetes.io/projected/b35d565f-4d9f-437a-add9-8ef40d891e99-kube-api-access-njxzg\") pod \"mariadb-operator-controller-manager-c87fff755-bz4tf\" (UID: \"b35d565f-4d9f-437a-add9-8ef40d891e99\") " pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bz4tf" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.753656 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-686df47fcb-2b24b"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.755556 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-2b24b" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.760009 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-68bbt" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.772589 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.790810 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4ttvx" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.800601 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-75r9k" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.805707 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bz4tf" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.813387 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-7nxnh"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.825233 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854nht6t\" (UID: \"4d7fd266-ebc9-46f2-9355-4dac2699822c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.826943 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ch8jm\" (UniqueName: \"kubernetes.io/projected/e898f43a-2487-48f8-9615-f02fdbd9eb30-kube-api-access-ch8jm\") pod \"placement-operator-controller-manager-686df47fcb-2b24b\" (UID: \"e898f43a-2487-48f8-9615-f02fdbd9eb30\") " pod="openstack-operators/placement-operator-controller-manager-686df47fcb-2b24b" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.827087 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4glx\" (UniqueName: \"kubernetes.io/projected/4d7fd266-ebc9-46f2-9355-4dac2699822c-kube-api-access-p4glx\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854nht6t\" (UID: \"4d7fd266-ebc9-46f2-9355-4dac2699822c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.827190 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68g7k\" (UniqueName: \"kubernetes.io/projected/b3ba3b28-4c9a-48f6-a914-5a125e4ef7f1-kube-api-access-68g7k\") pod \"octavia-operator-controller-manager-7fc9b76cf6-9wst2\" (UID: \"b3ba3b28-4c9a-48f6-a914-5a125e4ef7f1\") " pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9wst2" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.827258 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4c7fb\" (UniqueName: 
\"kubernetes.io/projected/11e4c63f-cdc3-4d50-a4e7-03386747ca86-kube-api-access-4c7fb\") pod \"nova-operator-controller-manager-65849867d6-v552f\" (UID: \"11e4c63f-cdc3-4d50-a4e7-03386747ca86\") " pod="openstack-operators/nova-operator-controller-manager-65849867d6-v552f" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.827319 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert\") pod \"infra-operator-controller-manager-77c48c7859-ffgnr\" (UID: \"7654ac1c-746c-46e6-b276-e9f6a839a187\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.827402 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bg4cb\" (UniqueName: \"kubernetes.io/projected/92a8c35c-6ef9-4453-9233-df8579764cd2-kube-api-access-bg4cb\") pod \"ovn-operator-controller-manager-55db956ddc-7nxnh\" (UID: \"92a8c35c-6ef9-4453-9233-df8579764cd2\") " pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-7nxnh" Jan 21 17:48:33 crc kubenswrapper[4799]: E0121 17:48:33.827663 4799 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 21 17:48:33 crc kubenswrapper[4799]: E0121 17:48:33.827721 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert podName:7654ac1c-746c-46e6-b276-e9f6a839a187 nodeName:}" failed. No retries permitted until 2026-01-21 17:48:34.827705761 +0000 UTC m=+941.453995784 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert") pod "infra-operator-controller-manager-77c48c7859-ffgnr" (UID: "7654ac1c-746c-46e6-b276-e9f6a839a187") : secret "infra-operator-webhook-server-cert" not found Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.832362 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-rwglk" Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.833753 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-686df47fcb-2b24b"] Jan 21 17:48:33 crc kubenswrapper[4799]: I0121 17:48:33.869922 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tqgcv" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.078628 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854nht6t\" (UID: \"4d7fd266-ebc9-46f2-9355-4dac2699822c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" Jan 21 17:48:34 crc kubenswrapper[4799]: E0121 17:48:34.078811 4799 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.078828 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ch8jm\" (UniqueName: \"kubernetes.io/projected/e898f43a-2487-48f8-9615-f02fdbd9eb30-kube-api-access-ch8jm\") pod \"placement-operator-controller-manager-686df47fcb-2b24b\" (UID: \"e898f43a-2487-48f8-9615-f02fdbd9eb30\") " pod="openstack-operators/placement-operator-controller-manager-686df47fcb-2b24b" Jan 21 17:48:34 crc kubenswrapper[4799]: E0121 17:48:34.079070 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert podName:4d7fd266-ebc9-46f2-9355-4dac2699822c nodeName:}" failed. No retries permitted until 2026-01-21 17:48:34.578847682 +0000 UTC m=+941.205137705 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" (UID: "4d7fd266-ebc9-46f2-9355-4dac2699822c") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.079092 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4glx\" (UniqueName: \"kubernetes.io/projected/4d7fd266-ebc9-46f2-9355-4dac2699822c-kube-api-access-p4glx\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854nht6t\" (UID: \"4d7fd266-ebc9-46f2-9355-4dac2699822c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.079191 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68g7k\" (UniqueName: \"kubernetes.io/projected/b3ba3b28-4c9a-48f6-a914-5a125e4ef7f1-kube-api-access-68g7k\") pod \"octavia-operator-controller-manager-7fc9b76cf6-9wst2\" (UID: \"b3ba3b28-4c9a-48f6-a914-5a125e4ef7f1\") " pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9wst2" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.079594 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4c7fb\" (UniqueName: \"kubernetes.io/projected/11e4c63f-cdc3-4d50-a4e7-03386747ca86-kube-api-access-4c7fb\") pod \"nova-operator-controller-manager-65849867d6-v552f\" (UID: \"11e4c63f-cdc3-4d50-a4e7-03386747ca86\") " pod="openstack-operators/nova-operator-controller-manager-65849867d6-v552f" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.079696 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bg4cb\" (UniqueName: \"kubernetes.io/projected/92a8c35c-6ef9-4453-9233-df8579764cd2-kube-api-access-bg4cb\") pod \"ovn-operator-controller-manager-55db956ddc-7nxnh\" (UID: \"92a8c35c-6ef9-4453-9233-df8579764cd2\") " pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-7nxnh" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.080004 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-85dd56d4cc-dqwbg"] Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.083345 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-dqwbg" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.089264 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-dqt66" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.091524 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-n2lq2"] Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.093453 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-n2lq2" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.097146 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-gk7q7" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.097308 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-85dd56d4cc-dqwbg"] Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.104286 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-n2lq2"] Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.108820 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5"] Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.110114 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.120667 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ch8jm\" (UniqueName: \"kubernetes.io/projected/e898f43a-2487-48f8-9615-f02fdbd9eb30-kube-api-access-ch8jm\") pod \"placement-operator-controller-manager-686df47fcb-2b24b\" (UID: \"e898f43a-2487-48f8-9615-f02fdbd9eb30\") " pod="openstack-operators/placement-operator-controller-manager-686df47fcb-2b24b" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.122611 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-mhv6g" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.135464 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-2b24b" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.151721 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68g7k\" (UniqueName: \"kubernetes.io/projected/b3ba3b28-4c9a-48f6-a914-5a125e4ef7f1-kube-api-access-68g7k\") pod \"octavia-operator-controller-manager-7fc9b76cf6-9wst2\" (UID: \"b3ba3b28-4c9a-48f6-a914-5a125e4ef7f1\") " pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9wst2" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.152378 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bg4cb\" (UniqueName: \"kubernetes.io/projected/92a8c35c-6ef9-4453-9233-df8579764cd2-kube-api-access-bg4cb\") pod \"ovn-operator-controller-manager-55db956ddc-7nxnh\" (UID: \"92a8c35c-6ef9-4453-9233-df8579764cd2\") " pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-7nxnh" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.155375 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5"] Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.156044 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4glx\" (UniqueName: \"kubernetes.io/projected/4d7fd266-ebc9-46f2-9355-4dac2699822c-kube-api-access-p4glx\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854nht6t\" (UID: \"4d7fd266-ebc9-46f2-9355-4dac2699822c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.162182 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-65f5896948-jrzsz"] Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.163412 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-65f5896948-jrzsz" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.171439 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4c7fb\" (UniqueName: \"kubernetes.io/projected/11e4c63f-cdc3-4d50-a4e7-03386747ca86-kube-api-access-4c7fb\") pod \"nova-operator-controller-manager-65849867d6-v552f\" (UID: \"11e4c63f-cdc3-4d50-a4e7-03386747ca86\") " pod="openstack-operators/nova-operator-controller-manager-65849867d6-v552f" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.172323 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-q26c7" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.174431 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-65f5896948-jrzsz"] Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.180835 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndmsh\" (UniqueName: \"kubernetes.io/projected/d9e14e78-dbbf-4941-9abb-5e2f8eb1fc2f-kube-api-access-ndmsh\") pod \"swift-operator-controller-manager-85dd56d4cc-dqwbg\" (UID: \"d9e14e78-dbbf-4941-9abb-5e2f8eb1fc2f\") " pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-dqwbg" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.181036 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qv24p\" (UniqueName: \"kubernetes.io/projected/a2536e9b-8292-474d-ae06-00e4721120b3-kube-api-access-qv24p\") pod \"telemetry-operator-controller-manager-5f8f495fcf-n2lq2\" (UID: \"a2536e9b-8292-474d-ae06-00e4721120b3\") " pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-n2lq2" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.204318 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2"] Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.205534 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.210983 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.211288 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.212419 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-sxnxn" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.243634 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2"] Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.253532 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hnnwf"] Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.254934 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hnnwf" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.271201 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hnnwf"] Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.281198 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-rxl98" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.282204 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jzdc\" (UniqueName: \"kubernetes.io/projected/44bcdd0c-5b20-4387-a105-c8f3fb661a6f-kube-api-access-8jzdc\") pod \"test-operator-controller-manager-7cd8bc9dbb-v4wk5\" (UID: \"44bcdd0c-5b20-4387-a105-c8f3fb661a6f\") " pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.282247 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndmsh\" (UniqueName: \"kubernetes.io/projected/d9e14e78-dbbf-4941-9abb-5e2f8eb1fc2f-kube-api-access-ndmsh\") pod \"swift-operator-controller-manager-85dd56d4cc-dqwbg\" (UID: \"d9e14e78-dbbf-4941-9abb-5e2f8eb1fc2f\") " pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-dqwbg" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.282388 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lhfk\" (UniqueName: \"kubernetes.io/projected/822a89c1-0086-49f1-9bee-6ac87a2af52a-kube-api-access-7lhfk\") pod \"watcher-operator-controller-manager-65f5896948-jrzsz\" (UID: \"822a89c1-0086-49f1-9bee-6ac87a2af52a\") " pod="openstack-operators/watcher-operator-controller-manager-65f5896948-jrzsz" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.282482 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qv24p\" (UniqueName: \"kubernetes.io/projected/a2536e9b-8292-474d-ae06-00e4721120b3-kube-api-access-qv24p\") pod \"telemetry-operator-controller-manager-5f8f495fcf-n2lq2\" (UID: \"a2536e9b-8292-474d-ae06-00e4721120b3\") " pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-n2lq2" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.303249 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-65849867d6-v552f" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.312062 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndmsh\" (UniqueName: \"kubernetes.io/projected/d9e14e78-dbbf-4941-9abb-5e2f8eb1fc2f-kube-api-access-ndmsh\") pod \"swift-operator-controller-manager-85dd56d4cc-dqwbg\" (UID: \"d9e14e78-dbbf-4941-9abb-5e2f8eb1fc2f\") " pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-dqwbg" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.316928 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qv24p\" (UniqueName: \"kubernetes.io/projected/a2536e9b-8292-474d-ae06-00e4721120b3-kube-api-access-qv24p\") pod \"telemetry-operator-controller-manager-5f8f495fcf-n2lq2\" (UID: \"a2536e9b-8292-474d-ae06-00e4721120b3\") " pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-n2lq2" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.342164 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9wst2" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.345951 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-9b68f5989-6gbp4"] Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.385825 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhkl7\" (UniqueName: \"kubernetes.io/projected/3e5a60e4-5801-4273-a08a-20907c8bed09-kube-api-access-jhkl7\") pod \"rabbitmq-cluster-operator-manager-668c99d594-hnnwf\" (UID: \"3e5a60e4-5801-4273-a08a-20907c8bed09\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hnnwf" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.386135 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lhfk\" (UniqueName: \"kubernetes.io/projected/822a89c1-0086-49f1-9bee-6ac87a2af52a-kube-api-access-7lhfk\") pod \"watcher-operator-controller-manager-65f5896948-jrzsz\" (UID: \"822a89c1-0086-49f1-9bee-6ac87a2af52a\") " pod="openstack-operators/watcher-operator-controller-manager-65f5896948-jrzsz" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.391247 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-7nxnh" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.402415 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.402697 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxxps\" (UniqueName: \"kubernetes.io/projected/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-kube-api-access-pxxps\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.403012 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jzdc\" (UniqueName: \"kubernetes.io/projected/44bcdd0c-5b20-4387-a105-c8f3fb661a6f-kube-api-access-8jzdc\") pod \"test-operator-controller-manager-7cd8bc9dbb-v4wk5\" (UID: \"44bcdd0c-5b20-4387-a105-c8f3fb661a6f\") " pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.403078 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.556428 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lhfk\" (UniqueName: \"kubernetes.io/projected/822a89c1-0086-49f1-9bee-6ac87a2af52a-kube-api-access-7lhfk\") pod \"watcher-operator-controller-manager-65f5896948-jrzsz\" (UID: \"822a89c1-0086-49f1-9bee-6ac87a2af52a\") " pod="openstack-operators/watcher-operator-controller-manager-65f5896948-jrzsz" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.567732 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-dqwbg" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.572947 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-65f5896948-jrzsz" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.574752 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-n2lq2" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.579462 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.579538 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxxps\" (UniqueName: \"kubernetes.io/projected/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-kube-api-access-pxxps\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.579599 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854nht6t\" (UID: \"4d7fd266-ebc9-46f2-9355-4dac2699822c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.579704 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.579752 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhkl7\" (UniqueName: \"kubernetes.io/projected/3e5a60e4-5801-4273-a08a-20907c8bed09-kube-api-access-jhkl7\") pod \"rabbitmq-cluster-operator-manager-668c99d594-hnnwf\" (UID: \"3e5a60e4-5801-4273-a08a-20907c8bed09\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hnnwf" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.580054 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jzdc\" (UniqueName: \"kubernetes.io/projected/44bcdd0c-5b20-4387-a105-c8f3fb661a6f-kube-api-access-8jzdc\") pod \"test-operator-controller-manager-7cd8bc9dbb-v4wk5\" (UID: \"44bcdd0c-5b20-4387-a105-c8f3fb661a6f\") " pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5" Jan 21 17:48:34 crc kubenswrapper[4799]: E0121 17:48:34.580328 4799 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 21 17:48:34 crc kubenswrapper[4799]: E0121 17:48:34.580379 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert podName:4d7fd266-ebc9-46f2-9355-4dac2699822c nodeName:}" failed. No retries permitted until 2026-01-21 17:48:35.580358052 +0000 UTC m=+942.206648075 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" (UID: "4d7fd266-ebc9-46f2-9355-4dac2699822c") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 21 17:48:34 crc kubenswrapper[4799]: E0121 17:48:34.580536 4799 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 21 17:48:34 crc kubenswrapper[4799]: E0121 17:48:34.580658 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs podName:f0bcc23c-7399-4a1f-a91b-f643eaee6e60 nodeName:}" failed. No retries permitted until 2026-01-21 17:48:35.08064604 +0000 UTC m=+941.706936063 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs") pod "openstack-operator-controller-manager-7ffc46955b-5t4q2" (UID: "f0bcc23c-7399-4a1f-a91b-f643eaee6e60") : secret "metrics-server-cert" not found Jan 21 17:48:34 crc kubenswrapper[4799]: E0121 17:48:34.580773 4799 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 21 17:48:34 crc kubenswrapper[4799]: E0121 17:48:34.580844 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs podName:f0bcc23c-7399-4a1f-a91b-f643eaee6e60 nodeName:}" failed. No retries permitted until 2026-01-21 17:48:35.080833195 +0000 UTC m=+941.707123218 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs") pod "openstack-operator-controller-manager-7ffc46955b-5t4q2" (UID: "f0bcc23c-7399-4a1f-a91b-f643eaee6e60") : secret "webhook-server-cert" not found Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.606612 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhkl7\" (UniqueName: \"kubernetes.io/projected/3e5a60e4-5801-4273-a08a-20907c8bed09-kube-api-access-jhkl7\") pod \"rabbitmq-cluster-operator-manager-668c99d594-hnnwf\" (UID: \"3e5a60e4-5801-4273-a08a-20907c8bed09\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hnnwf" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.617960 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7ddb5c749-xgqnz"] Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.641434 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxxps\" (UniqueName: \"kubernetes.io/projected/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-kube-api-access-pxxps\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.666339 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-9f958b845-2qd6s"] Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.693754 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-c6994669c-msz6d"] Jan 21 17:48:34 crc 
kubenswrapper[4799]: I0121 17:48:34.704472 4799 generic.go:334] "Generic (PLEG): container finished" podID="4e0327f8-1c8b-43c2-b462-1c60671d91d5" containerID="842e2d1c2436161e5d9a203cab531bbdec5714a8c6bcc839194c36da8d7e98b0" exitCode=0 Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.704588 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djnlm" event={"ID":"4e0327f8-1c8b-43c2-b462-1c60671d91d5","Type":"ContainerDied","Data":"842e2d1c2436161e5d9a203cab531bbdec5714a8c6bcc839194c36da8d7e98b0"} Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.709318 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-6gbp4" event={"ID":"aa887ea8-0375-49c1-b802-9b3c8468fa87","Type":"ContainerStarted","Data":"f22ce7390d25b1c9e0fff654f4646f69929accf3c77cc3714b1021e19724637e"} Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.711414 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-xgqnz" event={"ID":"99a0338e-5d7f-47cd-a30f-8c57ab921724","Type":"ContainerStarted","Data":"225184a64d995f2e4464923e92640d1ce28022e27bce0727517c8fb48ae51ab1"} Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.869068 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.874504 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert\") pod \"infra-operator-controller-manager-77c48c7859-ffgnr\" (UID: \"7654ac1c-746c-46e6-b276-e9f6a839a187\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" Jan 21 17:48:34 crc kubenswrapper[4799]: E0121 17:48:34.874887 4799 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 21 17:48:34 crc kubenswrapper[4799]: E0121 17:48:34.876988 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert podName:7654ac1c-746c-46e6-b276-e9f6a839a187 nodeName:}" failed. No retries permitted until 2026-01-21 17:48:36.876961666 +0000 UTC m=+943.503251689 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert") pod "infra-operator-controller-manager-77c48c7859-ffgnr" (UID: "7654ac1c-746c-46e6-b276-e9f6a839a187") : secret "infra-operator-webhook-server-cert" not found Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.895740 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hnnwf" Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.931562 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-tslfv"] Jan 21 17:48:34 crc kubenswrapper[4799]: I0121 17:48:34.936922 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ff7f6"] Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.157917 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-78757b4889-75r9k"] Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.168793 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-767fdc4f47-rw2zn"] Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.180499 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.180610 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.180745 4799 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.180816 4799 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.180828 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs podName:f0bcc23c-7399-4a1f-a91b-f643eaee6e60 nodeName:}" failed. No retries permitted until 2026-01-21 17:48:36.180807445 +0000 UTC m=+942.807097468 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs") pod "openstack-operator-controller-manager-7ffc46955b-5t4q2" (UID: "f0bcc23c-7399-4a1f-a91b-f643eaee6e60") : secret "metrics-server-cert" not found Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.180913 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs podName:f0bcc23c-7399-4a1f-a91b-f643eaee6e60 nodeName:}" failed. No retries permitted until 2026-01-21 17:48:36.180894068 +0000 UTC m=+942.807184091 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs") pod "openstack-operator-controller-manager-7ffc46955b-5t4q2" (UID: "f0bcc23c-7399-4a1f-a91b-f643eaee6e60") : secret "webhook-server-cert" not found Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.199886 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-bz4tf"] Jan 21 17:48:35 crc kubenswrapper[4799]: W0121 17:48:35.210705 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod223724ab_b9ee_4f55_b1ab_bf730a6314f9.slice/crio-7411df2eabdfb812cf31c23660098a144ddbe5192994b6893fa7fbee88efe2d0 WatchSource:0}: Error finding container 7411df2eabdfb812cf31c23660098a144ddbe5192994b6893fa7fbee88efe2d0: Status 404 returned error can't find the container with id 7411df2eabdfb812cf31c23660098a144ddbe5192994b6893fa7fbee88efe2d0 Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.222792 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-686df47fcb-2b24b"] Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.310517 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-7nxnh"] Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.318804 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-864f6b75bf-4ttvx"] Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.325149 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-cb4666565-rwglk"] Jan 21 17:48:35 crc kubenswrapper[4799]: W0121 17:48:35.330171 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92a8c35c_6ef9_4453_9233_df8579764cd2.slice/crio-ea2bc86af602644b937f2b1feda7507583df3b3437baec437f32c9e4b77460c7 WatchSource:0}: Error finding container ea2bc86af602644b937f2b1feda7507583df3b3437baec437f32c9e4b77460c7: Status 404 returned error can't find the container with id ea2bc86af602644b937f2b1feda7507583df3b3437baec437f32c9e4b77460c7 Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.330436 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-65849867d6-v552f"] Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.496394 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-n2lq2"] Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.505573 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9wst2"] Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.514024 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-85dd56d4cc-dqwbg"] Jan 21 17:48:35 crc kubenswrapper[4799]: W0121 17:48:35.520651 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9e14e78_dbbf_4941_9abb_5e2f8eb1fc2f.slice/crio-9f744eee54d1a3f44eb48ebe4806ac6d35c10918b67fbe1f1f5807716869e0f4 WatchSource:0}: Error finding container 
9f744eee54d1a3f44eb48ebe4806ac6d35c10918b67fbe1f1f5807716869e0f4: Status 404 returned error can't find the container with id 9f744eee54d1a3f44eb48ebe4806ac6d35c10918b67fbe1f1f5807716869e0f4 Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.523915 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:2e89109f5db66abf1afd15ef59bda35a53db40c5e59e020579ac5aa0acea1843,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qv24p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5f8f495fcf-n2lq2_openstack-operators(a2536e9b-8292-474d-ae06-00e4721120b3): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.525031 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-n2lq2" podUID="a2536e9b-8292-474d-ae06-00e4721120b3" Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.524957 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:9404536bf7cb7c3818e1a0f92b53e4d7c02fe7942324f32894106f02f8fc7e92,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ndmsh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-85dd56d4cc-dqwbg_openstack-operators(d9e14e78-dbbf-4941-9abb-5e2f8eb1fc2f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.526537 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-dqwbg" podUID="d9e14e78-dbbf-4941-9abb-5e2f8eb1fc2f" Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.589954 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854nht6t\" (UID: \"4d7fd266-ebc9-46f2-9355-4dac2699822c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.590244 4799 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.590309 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert podName:4d7fd266-ebc9-46f2-9355-4dac2699822c nodeName:}" failed. 
No retries permitted until 2026-01-21 17:48:37.590289537 +0000 UTC m=+944.216579560 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" (UID: "4d7fd266-ebc9-46f2-9355-4dac2699822c") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.688221 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hnnwf"] Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.698294 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5"] Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.703828 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-65f5896948-jrzsz"] Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.726328 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:244a4906353b84899db16a89e1ebb64491c9f85e69327cb2a72b6da0142a6e5e,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8jzdc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-7cd8bc9dbb-v4wk5_openstack-operators(44bcdd0c-5b20-4387-a105-c8f3fb661a6f): ErrImagePull: pull QPS exceeded" 
logger="UnhandledError" Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.727560 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5" podUID="44bcdd0c-5b20-4387-a105-c8f3fb661a6f" Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.727835 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.30:5001/openstack-k8s-operators/watcher-operator:8f89cebcdb83b244613d84873d84cfe705f618b0,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7lhfk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-65f5896948-jrzsz_openstack-operators(822a89c1-0086-49f1-9bee-6ac87a2af52a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.729012 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-65f5896948-jrzsz" podUID="822a89c1-0086-49f1-9bee-6ac87a2af52a" Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.734685 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4ttvx" 
event={"ID":"f90c4327-642d-4efd-90d3-7d3b83dbcfc9","Type":"ContainerStarted","Data":"c791ae4a832ef0f8d49ef710560ea222d333c98f4dfd9f5b1d81ca4885de06ca"} Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.736421 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-9f958b845-2qd6s" event={"ID":"0e8e19fd-c988-48ce-9150-1b46974bd86e","Type":"ContainerStarted","Data":"090d4af2e7c42ec76ccfbef5c126ffb421746b8ae15a939d800662ff9289c197"} Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.738568 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-2b24b" event={"ID":"e898f43a-2487-48f8-9615-f02fdbd9eb30","Type":"ContainerStarted","Data":"629f3417ea7be7e1b2c41c4b3e110e840cc10ccea150294d0b2df352063cdaef"} Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.753687 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-7nxnh" event={"ID":"92a8c35c-6ef9-4453-9233-df8579764cd2","Type":"ContainerStarted","Data":"ea2bc86af602644b937f2b1feda7507583df3b3437baec437f32c9e4b77460c7"} Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.756439 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-rw2zn" event={"ID":"223724ab-b9ee-4f55-b1ab-bf730a6314f9","Type":"ContainerStarted","Data":"7411df2eabdfb812cf31c23660098a144ddbe5192994b6893fa7fbee88efe2d0"} Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.760284 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-75r9k" event={"ID":"3a0e1cc6-500f-4493-8a18-0eeea206a4f7","Type":"ContainerStarted","Data":"a49d66adc682af5dcabeeaff53fb848a91f1b3b108d93e6ddaa7254a1169202c"} Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.764255 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-dqwbg" event={"ID":"d9e14e78-dbbf-4941-9abb-5e2f8eb1fc2f","Type":"ContainerStarted","Data":"9f744eee54d1a3f44eb48ebe4806ac6d35c10918b67fbe1f1f5807716869e0f4"} Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.766092 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:9404536bf7cb7c3818e1a0f92b53e4d7c02fe7942324f32894106f02f8fc7e92\\\"\"" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-dqwbg" podUID="d9e14e78-dbbf-4941-9abb-5e2f8eb1fc2f" Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.767973 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-tslfv" event={"ID":"70fddebf-b616-47bd-a139-d2a4999624dd","Type":"ContainerStarted","Data":"eaf702071fd245fd1fb69c071791a0307cd0f8760f1c1a51c890f64c867ff59c"} Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.770561 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9wst2" event={"ID":"b3ba3b28-4c9a-48f6-a914-5a125e4ef7f1","Type":"ContainerStarted","Data":"854fdfba36ae166dc2935149ee879da9318496eb409fb72f5d1d435d66d93ff1"} Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.798651 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ff7f6" event={"ID":"ac9f205a-3d30-4ca3-b253-32c441466211","Type":"ContainerStarted","Data":"618b19b32791cbc37bd204b690742981d79a2999d0fcc2a58c25efbde63201fa"} Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.801802 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-n2lq2" event={"ID":"a2536e9b-8292-474d-ae06-00e4721120b3","Type":"ContainerStarted","Data":"57d73cb2252205f8e6b5fcd7ab721f56a1371f9cd98995f9be34f617c50e81dc"} Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.803154 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:2e89109f5db66abf1afd15ef59bda35a53db40c5e59e020579ac5aa0acea1843\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-n2lq2" podUID="a2536e9b-8292-474d-ae06-00e4721120b3" Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.809465 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-c6994669c-msz6d" event={"ID":"10ffe97a-fa49-481f-9e79-55627ab24692","Type":"ContainerStarted","Data":"5842c7841c05f063764cc1f629402b55784d48c30e4767c23dc769e7116bc571"} Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.811607 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hnnwf" event={"ID":"3e5a60e4-5801-4273-a08a-20907c8bed09","Type":"ContainerStarted","Data":"2dbbb87ef16a57704176c959765e561ecfcbfc857bfae6727c53751e77887a3e"} Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.813016 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bz4tf" event={"ID":"b35d565f-4d9f-437a-add9-8ef40d891e99","Type":"ContainerStarted","Data":"b91785078c9733a7d3c97cdf5a2b2fa5253a1abcc2efd82f32fcbd8586f4ab9e"} Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.817255 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-65849867d6-v552f" event={"ID":"11e4c63f-cdc3-4d50-a4e7-03386747ca86","Type":"ContainerStarted","Data":"5a5e7e368f5516e27b01a9bde0c0339e4d976bb8f86cb7d4138e80e66562d210"} Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.820644 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-rwglk" event={"ID":"ed464d3c-bdd7-4b19-a332-402ddeccb65b","Type":"ContainerStarted","Data":"738113c42e770e49063b98fb305b78b5bf12733477dc0fb97b74e41c17bdb901"} Jan 21 17:48:35 crc kubenswrapper[4799]: I0121 17:48:35.827372 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tqgcv"] Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.846537 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6rqpg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-djnlm_openshift-marketplace(4e0327f8-1c8b-43c2-b462-1c60671d91d5): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Jan 21 17:48:35 crc kubenswrapper[4799]: E0121 17:48:35.855436 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"pull QPS exceeded\"" pod="openshift-marketplace/certified-operators-djnlm" podUID="4e0327f8-1c8b-43c2-b462-1c60671d91d5"
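
Note: the burst of ErrImagePull: "pull QPS exceeded" failures above (telemetry-operator, swift-operator, test-operator, watcher-operator, and the certified-operators catalog pod) is not a registry-side rejection. The kubelet throttles its own image pulls with a client-side token bucket (the --registry-qps and --registry-burst kubelet flags, defaulting to 5 QPS with a burst of 10), and with roughly twenty operator pods scheduled at once the bucket drains and the overflow pulls fail immediately rather than queue. A minimal Go sketch of that gate, assuming the default limits; the helper names are illustrative, not kubelet source:

    package main

    import (
    	"errors"
    	"fmt"

    	"k8s.io/client-go/util/flowcontrol"
    )

    var errQPSExceeded = errors.New("pull QPS exceeded")

    // pullWithThrottle takes a token before talking to the image service.
    // TryAccept never blocks: once the bucket is empty the pull fails
    // immediately with the exact error string seen in the log above.
    func pullWithThrottle(limiter flowcontrol.RateLimiter, image string) error {
    	if !limiter.TryAccept() {
    		return errQPSExceeded
    	}
    	// A real kubelet would now issue the CRI PullImage RPC for image.
    	_ = image
    	return nil
    }

    func main() {
    	// Kubelet defaults: --registry-qps=5, --registry-burst=10.
    	limiter := flowcontrol.NewTokenBucketRateLimiter(5, 10)
    	for i := 0; i < 20; i++ {
    		err := pullWithThrottle(limiter, fmt.Sprintf("quay.io/example/operator-%d", i))
    		fmt.Printf("pull %2d: %v\n", i, err)
    	}
    	// The first ten pulls (the burst) succeed; most of the remaining
    	// ten fail fast, like the simultaneous operator pods above.
    }

Because the check never blocks, the failure is instant, and the pod workers convert it into the ImagePullBackOff retries that follow.
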
Jan 21 17:48:36 crc kubenswrapper[4799]: I0121 17:48:36.206789 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2"
Jan 21 17:48:36 crc kubenswrapper[4799]: I0121 17:48:36.206975 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2"
Jan 21 17:48:36 crc kubenswrapper[4799]: E0121 17:48:36.207179 4799 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Jan 21 17:48:36 crc kubenswrapper[4799]: E0121 17:48:36.207244 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs podName:f0bcc23c-7399-4a1f-a91b-f643eaee6e60 nodeName:}" failed. No retries permitted until 2026-01-21 17:48:38.207226655 +0000 UTC m=+944.833516678 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs") pod "openstack-operator-controller-manager-7ffc46955b-5t4q2" (UID: "f0bcc23c-7399-4a1f-a91b-f643eaee6e60") : secret "webhook-server-cert" not found
Jan 21 17:48:36 crc kubenswrapper[4799]: E0121 17:48:36.207321 4799 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Jan 21 17:48:36 crc kubenswrapper[4799]: E0121 17:48:36.207347 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs podName:f0bcc23c-7399-4a1f-a91b-f643eaee6e60 nodeName:}" failed. No retries permitted until 2026-01-21 17:48:38.207339318 +0000 UTC m=+944.833629341 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs") pod "openstack-operator-controller-manager-7ffc46955b-5t4q2" (UID: "f0bcc23c-7399-4a1f-a91b-f643eaee6e60") : secret "metrics-server-cert" not found
Jan 21 17:48:36 crc kubenswrapper[4799]: I0121 17:48:36.838998 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-65f5896948-jrzsz" event={"ID":"822a89c1-0086-49f1-9bee-6ac87a2af52a","Type":"ContainerStarted","Data":"2780be86671722f6e78695743ec08f722135347d1736541d62f4a165b70e9057"}
Jan 21 17:48:36 crc kubenswrapper[4799]: E0121 17:48:36.842775 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.30:5001/openstack-k8s-operators/watcher-operator:8f89cebcdb83b244613d84873d84cfe705f618b0\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-65f5896948-jrzsz" podUID="822a89c1-0086-49f1-9bee-6ac87a2af52a"
Jan 21 17:48:36 crc kubenswrapper[4799]: I0121 17:48:36.849795 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5" event={"ID":"44bcdd0c-5b20-4387-a105-c8f3fb661a6f","Type":"ContainerStarted","Data":"fe87406cd42afc136fd612c363b7b2f317b2b65d8aef4aab38aebdc9bee4963b"}
Jan 21 17:48:36 crc kubenswrapper[4799]: I0121 17:48:36.850068 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tqgcv" podUID="f25be100-1ae0-4419-b2b3-82140d80878e" containerName="registry-server" containerID="cri-o://21841337815daf48dac3ccc0af7aa0d6664ff6a4b249092dc75dcba9aa7330c5" gracePeriod=2
Jan 21 17:48:36 crc kubenswrapper[4799]: E0121 17:48:36.851576 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:2e89109f5db66abf1afd15ef59bda35a53db40c5e59e020579ac5aa0acea1843\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-n2lq2" podUID="a2536e9b-8292-474d-ae06-00e4721120b3"
Jan 21 17:48:36 crc kubenswrapper[4799]: E0121 17:48:36.851654 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:244a4906353b84899db16a89e1ebb64491c9f85e69327cb2a72b6da0142a6e5e\\\"\"" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5" podUID="44bcdd0c-5b20-4387-a105-c8f3fb661a6f"
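
Note: the "Killing container with a grace period" entry above (gracePeriod=2 for the redhat-marketplace registry-server) is the standard two-stage stop: the runtime delivers SIGTERM, the kubelet waits out the grace period, and only then escalates to SIGKILL. The kubelet delegates the actual signalling to cri-o over CRI; the Go sketch below shows the same TERM-then-KILL pattern at the plain-process level, with the two-second grace from the log. It illustrates the pattern, it is not kubelet code:

    package main

    import (
    	"fmt"
    	"os/exec"
    	"syscall"
    	"time"
    )

    // stopWithGrace mirrors the two-stage stop: SIGTERM first, then
    // SIGKILL once the grace period expires (gracePeriod=2 in the log).
    func stopWithGrace(cmd *exec.Cmd, grace time.Duration) error {
    	if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
    		return err
    	}
    	done := make(chan error, 1)
    	go func() { done <- cmd.Wait() }()
    	select {
    	case err := <-done:
    		return err // exited politely within the grace period
    	case <-time.After(grace):
    		_ = cmd.Process.Kill() // grace expired: escalate to SIGKILL
    		return <-done
    	}
    }

    func main() {
    	cmd := exec.Command("sleep", "60") // stands in for registry-server
    	if err := cmd.Start(); err != nil {
    		panic(err)
    	}
    	fmt.Println("stopped:", stopWithGrace(cmd, 2*time.Second))
    }
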
pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5" podUID="44bcdd0c-5b20-4387-a105-c8f3fb661a6f" Jan 21 17:48:36 crc kubenswrapper[4799]: E0121 17:48:36.858362 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:9404536bf7cb7c3818e1a0f92b53e4d7c02fe7942324f32894106f02f8fc7e92\\\"\"" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-dqwbg" podUID="d9e14e78-dbbf-4941-9abb-5e2f8eb1fc2f" Jan 21 17:48:36 crc kubenswrapper[4799]: I0121 17:48:36.924432 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert\") pod \"infra-operator-controller-manager-77c48c7859-ffgnr\" (UID: \"7654ac1c-746c-46e6-b276-e9f6a839a187\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" Jan 21 17:48:36 crc kubenswrapper[4799]: E0121 17:48:36.924807 4799 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 21 17:48:36 crc kubenswrapper[4799]: E0121 17:48:36.924907 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert podName:7654ac1c-746c-46e6-b276-e9f6a839a187 nodeName:}" failed. No retries permitted until 2026-01-21 17:48:40.924882878 +0000 UTC m=+947.551172901 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert") pod "infra-operator-controller-manager-77c48c7859-ffgnr" (UID: "7654ac1c-746c-46e6-b276-e9f6a839a187") : secret "infra-operator-webhook-server-cert" not found Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.477108 4799 util.go:48] "No ready sandbox for pod can be found. 
Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.477108 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tqgcv"
Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.639193 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f25be100-1ae0-4419-b2b3-82140d80878e-catalog-content\") pod \"f25be100-1ae0-4419-b2b3-82140d80878e\" (UID: \"f25be100-1ae0-4419-b2b3-82140d80878e\") "
Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.639365 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f25be100-1ae0-4419-b2b3-82140d80878e-utilities\") pod \"f25be100-1ae0-4419-b2b3-82140d80878e\" (UID: \"f25be100-1ae0-4419-b2b3-82140d80878e\") "
Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.639508 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fb7v\" (UniqueName: \"kubernetes.io/projected/f25be100-1ae0-4419-b2b3-82140d80878e-kube-api-access-2fb7v\") pod \"f25be100-1ae0-4419-b2b3-82140d80878e\" (UID: \"f25be100-1ae0-4419-b2b3-82140d80878e\") "
Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.639870 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854nht6t\" (UID: \"4d7fd266-ebc9-46f2-9355-4dac2699822c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t"
Jan 21 17:48:37 crc kubenswrapper[4799]: E0121 17:48:37.640087 4799 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 21 17:48:37 crc kubenswrapper[4799]: E0121 17:48:37.640496 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert podName:4d7fd266-ebc9-46f2-9355-4dac2699822c nodeName:}" failed. No retries permitted until 2026-01-21 17:48:41.640473822 +0000 UTC m=+948.266763845 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" (UID: "4d7fd266-ebc9-46f2-9355-4dac2699822c") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.640527 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f25be100-1ae0-4419-b2b3-82140d80878e-utilities" (OuterVolumeSpecName: "utilities") pod "f25be100-1ae0-4419-b2b3-82140d80878e" (UID: "f25be100-1ae0-4419-b2b3-82140d80878e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.666453 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f25be100-1ae0-4419-b2b3-82140d80878e-kube-api-access-2fb7v" (OuterVolumeSpecName: "kube-api-access-2fb7v") pod "f25be100-1ae0-4419-b2b3-82140d80878e" (UID: "f25be100-1ae0-4419-b2b3-82140d80878e"). InnerVolumeSpecName "kube-api-access-2fb7v". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.690956 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f25be100-1ae0-4419-b2b3-82140d80878e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f25be100-1ae0-4419-b2b3-82140d80878e" (UID: "f25be100-1ae0-4419-b2b3-82140d80878e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.741205 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f25be100-1ae0-4419-b2b3-82140d80878e-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.741241 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fb7v\" (UniqueName: \"kubernetes.io/projected/f25be100-1ae0-4419-b2b3-82140d80878e-kube-api-access-2fb7v\") on node \"crc\" DevicePath \"\"" Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.741255 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f25be100-1ae0-4419-b2b3-82140d80878e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.911633 4799 generic.go:334] "Generic (PLEG): container finished" podID="f25be100-1ae0-4419-b2b3-82140d80878e" containerID="21841337815daf48dac3ccc0af7aa0d6664ff6a4b249092dc75dcba9aa7330c5" exitCode=0 Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.911746 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tqgcv" Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.911757 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tqgcv" event={"ID":"f25be100-1ae0-4419-b2b3-82140d80878e","Type":"ContainerDied","Data":"21841337815daf48dac3ccc0af7aa0d6664ff6a4b249092dc75dcba9aa7330c5"} Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.911878 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tqgcv" event={"ID":"f25be100-1ae0-4419-b2b3-82140d80878e","Type":"ContainerDied","Data":"1f3c996e313210cd6a5ed5cace8546b4d3e365e96473c14e4b5f7c01f04543ed"} Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.912006 4799 scope.go:117] "RemoveContainer" containerID="21841337815daf48dac3ccc0af7aa0d6664ff6a4b249092dc75dcba9aa7330c5" Jan 21 17:48:37 crc kubenswrapper[4799]: E0121 17:48:37.916764 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:244a4906353b84899db16a89e1ebb64491c9f85e69327cb2a72b6da0142a6e5e\\\"\"" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5" podUID="44bcdd0c-5b20-4387-a105-c8f3fb661a6f" Jan 21 17:48:37 crc kubenswrapper[4799]: E0121 17:48:37.917059 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.30:5001/openstack-k8s-operators/watcher-operator:8f89cebcdb83b244613d84873d84cfe705f618b0\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-65f5896948-jrzsz" podUID="822a89c1-0086-49f1-9bee-6ac87a2af52a" Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 
Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.981564 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tqgcv"]
Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.989713 4799 scope.go:117] "RemoveContainer" containerID="ab6081a8e3ab614f03e12c4303a600a0a4ed8f5dec7ef89ee65720dfdf0c9f19"
Jan 21 17:48:37 crc kubenswrapper[4799]: I0121 17:48:37.997693 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tqgcv"]
Jan 21 17:48:38 crc kubenswrapper[4799]: I0121 17:48:38.058820 4799 scope.go:117] "RemoveContainer" containerID="0b432ae396fd79d60dcd30058b8a1191084e0efd696f0ad6245c6cd4757b0d68"
Jan 21 17:48:38 crc kubenswrapper[4799]: I0121 17:48:38.088630 4799 scope.go:117] "RemoveContainer" containerID="21841337815daf48dac3ccc0af7aa0d6664ff6a4b249092dc75dcba9aa7330c5"
Jan 21 17:48:38 crc kubenswrapper[4799]: E0121 17:48:38.090918 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21841337815daf48dac3ccc0af7aa0d6664ff6a4b249092dc75dcba9aa7330c5\": container with ID starting with 21841337815daf48dac3ccc0af7aa0d6664ff6a4b249092dc75dcba9aa7330c5 not found: ID does not exist" containerID="21841337815daf48dac3ccc0af7aa0d6664ff6a4b249092dc75dcba9aa7330c5"
Jan 21 17:48:38 crc kubenswrapper[4799]: I0121 17:48:38.091220 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21841337815daf48dac3ccc0af7aa0d6664ff6a4b249092dc75dcba9aa7330c5"} err="failed to get container status \"21841337815daf48dac3ccc0af7aa0d6664ff6a4b249092dc75dcba9aa7330c5\": rpc error: code = NotFound desc = could not find container \"21841337815daf48dac3ccc0af7aa0d6664ff6a4b249092dc75dcba9aa7330c5\": container with ID starting with 21841337815daf48dac3ccc0af7aa0d6664ff6a4b249092dc75dcba9aa7330c5 not found: ID does not exist"
Jan 21 17:48:38 crc kubenswrapper[4799]: I0121 17:48:38.091260 4799 scope.go:117] "RemoveContainer" containerID="ab6081a8e3ab614f03e12c4303a600a0a4ed8f5dec7ef89ee65720dfdf0c9f19"
Jan 21 17:48:38 crc kubenswrapper[4799]: E0121 17:48:38.091983 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab6081a8e3ab614f03e12c4303a600a0a4ed8f5dec7ef89ee65720dfdf0c9f19\": container with ID starting with ab6081a8e3ab614f03e12c4303a600a0a4ed8f5dec7ef89ee65720dfdf0c9f19 not found: ID does not exist" containerID="ab6081a8e3ab614f03e12c4303a600a0a4ed8f5dec7ef89ee65720dfdf0c9f19"
Jan 21 17:48:38 crc kubenswrapper[4799]: I0121 17:48:38.092081 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab6081a8e3ab614f03e12c4303a600a0a4ed8f5dec7ef89ee65720dfdf0c9f19"} err="failed to get container status \"ab6081a8e3ab614f03e12c4303a600a0a4ed8f5dec7ef89ee65720dfdf0c9f19\": rpc error: code = NotFound desc = could not find container \"ab6081a8e3ab614f03e12c4303a600a0a4ed8f5dec7ef89ee65720dfdf0c9f19\": container with ID starting with ab6081a8e3ab614f03e12c4303a600a0a4ed8f5dec7ef89ee65720dfdf0c9f19 not found: ID does not exist"
Jan 21 17:48:38 crc kubenswrapper[4799]: I0121 17:48:38.092156 4799 scope.go:117] "RemoveContainer" containerID="0b432ae396fd79d60dcd30058b8a1191084e0efd696f0ad6245c6cd4757b0d68"
Jan 21 17:48:38 crc kubenswrapper[4799]: E0121 17:48:38.092874 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b432ae396fd79d60dcd30058b8a1191084e0efd696f0ad6245c6cd4757b0d68\": container with ID starting with 0b432ae396fd79d60dcd30058b8a1191084e0efd696f0ad6245c6cd4757b0d68 not found: ID does not exist" containerID="0b432ae396fd79d60dcd30058b8a1191084e0efd696f0ad6245c6cd4757b0d68"
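
Note: the RemoveContainer / "DeleteContainer returned error" exchange above is harmless. Deletion raced with the runtime's own cleanup, so when the kubelet re-queried each container's status, cri-o answered with gRPC NotFound; for a delete, NotFound means the work is already done, so the kubelet records the error and carries on. A Go sketch of that idempotent treatment, with deleteFromRuntime as a hypothetical stand-in for the CRI RemoveContainer call:

    package main

    import (
    	"fmt"

    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    // removeContainer treats gRPC NotFound as success: if the runtime no
    // longer knows the ID, the container is already gone and the delete
    // is idempotent.
    func removeContainer(id string, deleteFromRuntime func(string) error) error {
    	if err := deleteFromRuntime(id); err != nil {
    		if status.Code(err) == codes.NotFound {
    			fmt.Printf("container %q already gone; treating as removed\n", id)
    			return nil
    		}
    		return err
    	}
    	return nil
    }

    func main() {
    	notFound := func(id string) error {
    		return status.Errorf(codes.NotFound, "could not find container %q", id)
    	}
    	if err := removeContainer("0b432ae3", notFound); err != nil {
    		panic(err)
    	}
    }
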
\"0b432ae396fd79d60dcd30058b8a1191084e0efd696f0ad6245c6cd4757b0d68\": container with ID starting with 0b432ae396fd79d60dcd30058b8a1191084e0efd696f0ad6245c6cd4757b0d68 not found: ID does not exist" containerID="0b432ae396fd79d60dcd30058b8a1191084e0efd696f0ad6245c6cd4757b0d68" Jan 21 17:48:38 crc kubenswrapper[4799]: I0121 17:48:38.092904 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b432ae396fd79d60dcd30058b8a1191084e0efd696f0ad6245c6cd4757b0d68"} err="failed to get container status \"0b432ae396fd79d60dcd30058b8a1191084e0efd696f0ad6245c6cd4757b0d68\": rpc error: code = NotFound desc = could not find container \"0b432ae396fd79d60dcd30058b8a1191084e0efd696f0ad6245c6cd4757b0d68\": container with ID starting with 0b432ae396fd79d60dcd30058b8a1191084e0efd696f0ad6245c6cd4757b0d68 not found: ID does not exist" Jan 21 17:48:38 crc kubenswrapper[4799]: I0121 17:48:38.222318 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f25be100-1ae0-4419-b2b3-82140d80878e" path="/var/lib/kubelet/pods/f25be100-1ae0-4419-b2b3-82140d80878e/volumes" Jan 21 17:48:38 crc kubenswrapper[4799]: I0121 17:48:38.253412 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:48:38 crc kubenswrapper[4799]: I0121 17:48:38.253497 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:48:38 crc kubenswrapper[4799]: E0121 17:48:38.253634 4799 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 21 17:48:38 crc kubenswrapper[4799]: E0121 17:48:38.253695 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs podName:f0bcc23c-7399-4a1f-a91b-f643eaee6e60 nodeName:}" failed. No retries permitted until 2026-01-21 17:48:42.253679296 +0000 UTC m=+948.879969319 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs") pod "openstack-operator-controller-manager-7ffc46955b-5t4q2" (UID: "f0bcc23c-7399-4a1f-a91b-f643eaee6e60") : secret "metrics-server-cert" not found Jan 21 17:48:38 crc kubenswrapper[4799]: E0121 17:48:38.253794 4799 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 21 17:48:38 crc kubenswrapper[4799]: E0121 17:48:38.253961 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs podName:f0bcc23c-7399-4a1f-a91b-f643eaee6e60 nodeName:}" failed. No retries permitted until 2026-01-21 17:48:42.253927453 +0000 UTC m=+948.880217626 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs") pod "openstack-operator-controller-manager-7ffc46955b-5t4q2" (UID: "f0bcc23c-7399-4a1f-a91b-f643eaee6e60") : secret "webhook-server-cert" not found Jan 21 17:48:40 crc kubenswrapper[4799]: I0121 17:48:40.965748 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert\") pod \"infra-operator-controller-manager-77c48c7859-ffgnr\" (UID: \"7654ac1c-746c-46e6-b276-e9f6a839a187\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" Jan 21 17:48:40 crc kubenswrapper[4799]: E0121 17:48:40.966082 4799 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 21 17:48:40 crc kubenswrapper[4799]: E0121 17:48:40.966471 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert podName:7654ac1c-746c-46e6-b276-e9f6a839a187 nodeName:}" failed. No retries permitted until 2026-01-21 17:48:48.966441079 +0000 UTC m=+955.592731112 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert") pod "infra-operator-controller-manager-77c48c7859-ffgnr" (UID: "7654ac1c-746c-46e6-b276-e9f6a839a187") : secret "infra-operator-webhook-server-cert" not found Jan 21 17:48:41 crc kubenswrapper[4799]: I0121 17:48:41.688983 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854nht6t\" (UID: \"4d7fd266-ebc9-46f2-9355-4dac2699822c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" Jan 21 17:48:41 crc kubenswrapper[4799]: E0121 17:48:41.689214 4799 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 21 17:48:41 crc kubenswrapper[4799]: E0121 17:48:41.689310 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert podName:4d7fd266-ebc9-46f2-9355-4dac2699822c nodeName:}" failed. No retries permitted until 2026-01-21 17:48:49.689287958 +0000 UTC m=+956.315577981 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" (UID: "4d7fd266-ebc9-46f2-9355-4dac2699822c") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 21 17:48:42 crc kubenswrapper[4799]: I0121 17:48:42.298998 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:48:42 crc kubenswrapper[4799]: I0121 17:48:42.299093 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:48:42 crc kubenswrapper[4799]: E0121 17:48:42.299207 4799 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 21 17:48:42 crc kubenswrapper[4799]: E0121 17:48:42.299285 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs podName:f0bcc23c-7399-4a1f-a91b-f643eaee6e60 nodeName:}" failed. No retries permitted until 2026-01-21 17:48:50.299260981 +0000 UTC m=+956.925551004 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs") pod "openstack-operator-controller-manager-7ffc46955b-5t4q2" (UID: "f0bcc23c-7399-4a1f-a91b-f643eaee6e60") : secret "webhook-server-cert" not found Jan 21 17:48:42 crc kubenswrapper[4799]: E0121 17:48:42.299344 4799 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 21 17:48:42 crc kubenswrapper[4799]: E0121 17:48:42.299455 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs podName:f0bcc23c-7399-4a1f-a91b-f643eaee6e60 nodeName:}" failed. No retries permitted until 2026-01-21 17:48:50.299431766 +0000 UTC m=+956.925721979 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs") pod "openstack-operator-controller-manager-7ffc46955b-5t4q2" (UID: "f0bcc23c-7399-4a1f-a91b-f643eaee6e60") : secret "metrics-server-cert" not found Jan 21 17:48:48 crc kubenswrapper[4799]: I0121 17:48:48.991416 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert\") pod \"infra-operator-controller-manager-77c48c7859-ffgnr\" (UID: \"7654ac1c-746c-46e6-b276-e9f6a839a187\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" Jan 21 17:48:48 crc kubenswrapper[4799]: I0121 17:48:48.999704 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7654ac1c-746c-46e6-b276-e9f6a839a187-cert\") pod \"infra-operator-controller-manager-77c48c7859-ffgnr\" (UID: \"7654ac1c-746c-46e6-b276-e9f6a839a187\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" Jan 21 17:48:49 crc kubenswrapper[4799]: E0121 17:48:49.002944 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:56c5f8b78445b3dbfc0d5afd9312906f6bef4dccf67302b0e4e5ca20bd263525" Jan 21 17:48:49 crc kubenswrapper[4799]: E0121 17:48:49.005725 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:56c5f8b78445b3dbfc0d5afd9312906f6bef4dccf67302b0e4e5ca20bd263525,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tqpnj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-78757b4889-75r9k_openstack-operators(3a0e1cc6-500f-4493-8a18-0eeea206a4f7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 17:48:49 crc kubenswrapper[4799]: E0121 17:48:49.006886 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-75r9k" podUID="3a0e1cc6-500f-4493-8a18-0eeea206a4f7" Jan 21 17:48:49 crc kubenswrapper[4799]: I0121 17:48:49.040196 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" Jan 21 17:48:49 crc kubenswrapper[4799]: I0121 17:48:49.702670 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854nht6t\" (UID: \"4d7fd266-ebc9-46f2-9355-4dac2699822c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" Jan 21 17:48:49 crc kubenswrapper[4799]: I0121 17:48:49.723161 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d7fd266-ebc9-46f2-9355-4dac2699822c-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854nht6t\" (UID: \"4d7fd266-ebc9-46f2-9355-4dac2699822c\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" Jan 21 17:48:49 crc kubenswrapper[4799]: E0121 17:48:49.755581 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:8b3bfb9e86618b7ac69443939b0968fae28a22cd62ea1e429b599ff9f8a5f8cf" Jan 21 17:48:49 crc kubenswrapper[4799]: E0121 17:48:49.755887 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:8b3bfb9e86618b7ac69443939b0968fae28a22cd62ea1e429b599ff9f8a5f8cf,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bg4cb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-55db956ddc-7nxnh_openstack-operators(92a8c35c-6ef9-4453-9233-df8579764cd2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 17:48:49 crc kubenswrapper[4799]: E0121 17:48:49.757176 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-7nxnh" podUID="92a8c35c-6ef9-4453-9233-df8579764cd2" Jan 21 17:48:49 crc kubenswrapper[4799]: I0121 17:48:49.969830 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" Jan 21 17:48:50 crc kubenswrapper[4799]: E0121 17:48:50.043917 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:8b3bfb9e86618b7ac69443939b0968fae28a22cd62ea1e429b599ff9f8a5f8cf\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-7nxnh" podUID="92a8c35c-6ef9-4453-9233-df8579764cd2" Jan 21 17:48:50 crc kubenswrapper[4799]: E0121 17:48:50.044080 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:56c5f8b78445b3dbfc0d5afd9312906f6bef4dccf67302b0e4e5ca20bd263525\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-75r9k" podUID="3a0e1cc6-500f-4493-8a18-0eeea206a4f7" Jan 21 17:48:50 crc kubenswrapper[4799]: I0121 17:48:50.333073 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:48:50 crc kubenswrapper[4799]: I0121 17:48:50.333206 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:48:50 crc kubenswrapper[4799]: E0121 17:48:50.333453 4799 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 21 17:48:50 crc kubenswrapper[4799]: E0121 17:48:50.333536 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs podName:f0bcc23c-7399-4a1f-a91b-f643eaee6e60 nodeName:}" failed. No retries permitted until 2026-01-21 17:49:06.333513482 +0000 UTC m=+972.959803505 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs") pod "openstack-operator-controller-manager-7ffc46955b-5t4q2" (UID: "f0bcc23c-7399-4a1f-a91b-f643eaee6e60") : secret "metrics-server-cert" not found Jan 21 17:48:50 crc kubenswrapper[4799]: E0121 17:48:50.333775 4799 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 21 17:48:50 crc kubenswrapper[4799]: E0121 17:48:50.333960 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs podName:f0bcc23c-7399-4a1f-a91b-f643eaee6e60 nodeName:}" failed. No retries permitted until 2026-01-21 17:49:06.333930524 +0000 UTC m=+972.960220557 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs") pod "openstack-operator-controller-manager-7ffc46955b-5t4q2" (UID: "f0bcc23c-7399-4a1f-a91b-f643eaee6e60") : secret "webhook-server-cert" not found
Jan 21 17:48:52 crc kubenswrapper[4799]: E0121 17:48:52.158663 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2"
Jan 21 17:48:52 crc kubenswrapper[4799]: E0121 17:48:52.159250 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jhkl7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-hnnwf_openstack-operators(3e5a60e4-5801-4273-a08a-20907c8bed09): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 21 17:48:52 crc kubenswrapper[4799]: E0121 17:48:52.160447 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hnnwf" podUID="3e5a60e4-5801-4273-a08a-20907c8bed09"
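
Note: unlike the "pull QPS exceeded" failures earlier, this rabbitmq-cluster-operator pull (and the ironic and ovn pulls just above) did reach cri-o and died mid-transfer: "rpc error: code = Canceled desc = copying config: context canceled" means the kubelet-side request context was canceled while the runtime was still copying the image config, and the cancellation propagated through the gRPC call into the copy. A minimal Go sketch of that propagation, with timings chosen purely for illustration:

    package main

    import (
    	"context"
    	"fmt"
    	"time"
    )

    // pullImage stands in for the CRI PullImage RPC: it honours ctx, so a
    // caller-side cancellation surfaces from inside the copy, just as
    // "copying config: context canceled" does in the log.
    func pullImage(ctx context.Context, image string) error {
    	_ = image
    	select {
    	case <-time.After(5 * time.Second): // pretend the copy takes 5s
    		return nil
    	case <-ctx.Done():
    		return fmt.Errorf("copying config: %w", ctx.Err())
    	}
    }

    func main() {
    	ctx, cancel := context.WithTimeout(context.Background(), 100*time.Millisecond)
    	defer cancel()
    	err := pullImage(ctx, "quay.io/openstack-k8s-operators/rabbitmq-cluster-operator")
    	fmt.Println("pull failed:", err) // copying config: context canceled
    }
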
\\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hnnwf" podUID="3e5a60e4-5801-4273-a08a-20907c8bed09" Jan 21 17:48:53 crc kubenswrapper[4799]: I0121 17:48:53.628825 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr"] Jan 21 17:48:53 crc kubenswrapper[4799]: W0121 17:48:53.942492 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7654ac1c_746c_46e6_b276_e9f6a839a187.slice/crio-f37948276b631738eacd552bbbe0b5690f964f2497ae8accbb1bda2c7c98814b WatchSource:0}: Error finding container f37948276b631738eacd552bbbe0b5690f964f2497ae8accbb1bda2c7c98814b: Status 404 returned error can't find the container with id f37948276b631738eacd552bbbe0b5690f964f2497ae8accbb1bda2c7c98814b Jan 21 17:48:54 crc kubenswrapper[4799]: I0121 17:48:54.131564 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" event={"ID":"7654ac1c-746c-46e6-b276-e9f6a839a187","Type":"ContainerStarted","Data":"f37948276b631738eacd552bbbe0b5690f964f2497ae8accbb1bda2c7c98814b"} Jan 21 17:48:54 crc kubenswrapper[4799]: I0121 17:48:54.431752 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t"] Jan 21 17:48:55 crc kubenswrapper[4799]: I0121 17:48:55.174441 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-65849867d6-v552f" event={"ID":"11e4c63f-cdc3-4d50-a4e7-03386747ca86","Type":"ContainerStarted","Data":"f77e4ab96b00ef622e08e7531889ac4945dac7183d08d54886644e8a34778827"} Jan 21 17:48:55 crc kubenswrapper[4799]: I0121 17:48:55.176221 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-65849867d6-v552f" Jan 21 17:48:55 crc kubenswrapper[4799]: I0121 17:48:55.196583 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-xgqnz" Jan 21 17:48:55 crc kubenswrapper[4799]: I0121 17:48:55.206090 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-65849867d6-v552f" podStartSLOduration=4.466387827 podStartE2EDuration="22.206064406s" podCreationTimestamp="2026-01-21 17:48:33 +0000 UTC" firstStartedPulling="2026-01-21 17:48:35.402075593 +0000 UTC m=+942.028365616" lastFinishedPulling="2026-01-21 17:48:53.141752172 +0000 UTC m=+959.768042195" observedRunningTime="2026-01-21 17:48:55.196903398 +0000 UTC m=+961.823193421" watchObservedRunningTime="2026-01-21 17:48:55.206064406 +0000 UTC m=+961.832354429" Jan 21 17:48:55 crc kubenswrapper[4799]: I0121 17:48:55.217039 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9wst2" event={"ID":"b3ba3b28-4c9a-48f6-a914-5a125e4ef7f1","Type":"ContainerStarted","Data":"dc9aa2a50f5e41a1ccdfab65965456287e9741c7fdacd0b45fa22a0acb50964e"} Jan 21 17:48:55 crc kubenswrapper[4799]: I0121 17:48:55.218723 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9wst2" Jan 21 17:48:55 crc 
kubenswrapper[4799]: I0121 17:48:55.221348 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" event={"ID":"4d7fd266-ebc9-46f2-9355-4dac2699822c","Type":"ContainerStarted","Data":"4e8d47e0280f1d7be57dcbace9bb672f04f696eab81cc5ddb4d7e64d234ca6ac"} Jan 21 17:48:55 crc kubenswrapper[4799]: I0121 17:48:55.239832 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-xgqnz" podStartSLOduration=4.776319393 podStartE2EDuration="23.239814099s" podCreationTimestamp="2026-01-21 17:48:32 +0000 UTC" firstStartedPulling="2026-01-21 17:48:34.669074947 +0000 UTC m=+941.295364970" lastFinishedPulling="2026-01-21 17:48:53.132569653 +0000 UTC m=+959.758859676" observedRunningTime="2026-01-21 17:48:55.235261171 +0000 UTC m=+961.861551194" watchObservedRunningTime="2026-01-21 17:48:55.239814099 +0000 UTC m=+961.866104122" Jan 21 17:48:55 crc kubenswrapper[4799]: I0121 17:48:55.249197 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-tslfv" event={"ID":"70fddebf-b616-47bd-a139-d2a4999624dd","Type":"ContainerStarted","Data":"10d22ccc7d4c4f0687059007a36026245b57f9541548a11d5c1d3c217516c3d4"} Jan 21 17:48:55 crc kubenswrapper[4799]: I0121 17:48:55.249808 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-tslfv" Jan 21 17:48:55 crc kubenswrapper[4799]: I0121 17:48:55.262517 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-c6994669c-msz6d" event={"ID":"10ffe97a-fa49-481f-9e79-55627ab24692","Type":"ContainerStarted","Data":"346c4e6e1c8b0b211382706276071e58192e60be9e7984a16450ded163b44215"} Jan 21 17:48:55 crc kubenswrapper[4799]: I0121 17:48:55.263802 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-c6994669c-msz6d" Jan 21 17:48:55 crc kubenswrapper[4799]: I0121 17:48:55.289281 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9wst2" podStartSLOduration=4.677802687 podStartE2EDuration="22.289248735s" podCreationTimestamp="2026-01-21 17:48:33 +0000 UTC" firstStartedPulling="2026-01-21 17:48:35.520999271 +0000 UTC m=+942.147289304" lastFinishedPulling="2026-01-21 17:48:53.132445329 +0000 UTC m=+959.758735352" observedRunningTime="2026-01-21 17:48:55.278240124 +0000 UTC m=+961.904530147" watchObservedRunningTime="2026-01-21 17:48:55.289248735 +0000 UTC m=+961.915538758" Jan 21 17:48:55 crc kubenswrapper[4799]: I0121 17:48:55.346621 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-tslfv" podStartSLOduration=5.198298327 podStartE2EDuration="23.346606004s" podCreationTimestamp="2026-01-21 17:48:32 +0000 UTC" firstStartedPulling="2026-01-21 17:48:34.984910044 +0000 UTC m=+941.611200057" lastFinishedPulling="2026-01-21 17:48:53.133217711 +0000 UTC m=+959.759507734" observedRunningTime="2026-01-21 17:48:55.341647134 +0000 UTC m=+961.967937157" watchObservedRunningTime="2026-01-21 17:48:55.346606004 +0000 UTC m=+961.972896027" Jan 21 17:48:55 crc kubenswrapper[4799]: I0121 17:48:55.385553 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/glance-operator-controller-manager-c6994669c-msz6d" podStartSLOduration=5.002416406 podStartE2EDuration="23.385535023s" podCreationTimestamp="2026-01-21 17:48:32 +0000 UTC" firstStartedPulling="2026-01-21 17:48:34.762742801 +0000 UTC m=+941.389032824" lastFinishedPulling="2026-01-21 17:48:53.145861418 +0000 UTC m=+959.772151441" observedRunningTime="2026-01-21 17:48:55.382689203 +0000 UTC m=+962.008979216" watchObservedRunningTime="2026-01-21 17:48:55.385535023 +0000 UTC m=+962.011825046" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.277874 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-n2lq2" event={"ID":"a2536e9b-8292-474d-ae06-00e4721120b3","Type":"ContainerStarted","Data":"4da95b09bef949daa95b16c61f4c814bb39adcb64ea7c23c3c740e7e96f0d0a1"} Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.278924 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-n2lq2" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.295155 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-9f958b845-2qd6s" event={"ID":"0e8e19fd-c988-48ce-9150-1b46974bd86e","Type":"ContainerStarted","Data":"cd5ff6685ab7c5d7a2d509f912d30164e3260d6b06fab2a237f6ec8d22de552d"} Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.295313 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-9f958b845-2qd6s" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.307702 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-dqwbg" event={"ID":"d9e14e78-dbbf-4941-9abb-5e2f8eb1fc2f","Type":"ContainerStarted","Data":"022786e93cca03db5e3b8cabe5f7e9f7d26b62ffb0a608db09d172d00da88351"} Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.307963 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-dqwbg" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.309942 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-xgqnz" event={"ID":"99a0338e-5d7f-47cd-a30f-8c57ab921724","Type":"ContainerStarted","Data":"f248a977d67f8ca6099fa0853c9f52a07b961d247eb185bf107b6b28e63af653"} Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.328726 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-rw2zn" event={"ID":"223724ab-b9ee-4f55-b1ab-bf730a6314f9","Type":"ContainerStarted","Data":"633cb4e23f00db84f94051e50427445508e9b1474fc2870045178dcea650deca"} Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.341442 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4ttvx" event={"ID":"f90c4327-642d-4efd-90d3-7d3b83dbcfc9","Type":"ContainerStarted","Data":"021e250ff22795921c1b95db26508dc4aea9863f438f87f9e235442b8e0020c8"} Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.342330 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4ttvx" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.361422 4799 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bz4tf" event={"ID":"b35d565f-4d9f-437a-add9-8ef40d891e99","Type":"ContainerStarted","Data":"aa2282b9d56148f6d140b40372e92c4682488de4fcc9cab9ba3e1abd07b247c8"} Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.361582 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bz4tf" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.369075 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5" event={"ID":"44bcdd0c-5b20-4387-a105-c8f3fb661a6f","Type":"ContainerStarted","Data":"57b722afc3d9daefc46a11bde5968b22111fdd369e4e323617726a5c20e41ec3"} Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.369386 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.387209 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-rwglk" event={"ID":"ed464d3c-bdd7-4b19-a332-402ddeccb65b","Type":"ContainerStarted","Data":"f2440106cc2efcd3509a5e6fe44e20d908924bc17f046a60189b1313bc539d39"} Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.387939 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-rwglk" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.390528 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-n2lq2" podStartSLOduration=4.193657407 podStartE2EDuration="23.390506538s" podCreationTimestamp="2026-01-21 17:48:33 +0000 UTC" firstStartedPulling="2026-01-21 17:48:35.523325326 +0000 UTC m=+942.149615349" lastFinishedPulling="2026-01-21 17:48:54.720174457 +0000 UTC m=+961.346464480" observedRunningTime="2026-01-21 17:48:56.386752872 +0000 UTC m=+963.013042895" watchObservedRunningTime="2026-01-21 17:48:56.390506538 +0000 UTC m=+963.016796551" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.418418 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-6gbp4" event={"ID":"aa887ea8-0375-49c1-b802-9b3c8468fa87","Type":"ContainerStarted","Data":"cc6201211e37f67bdcb60125b89d43004c8f38152fa0ccab6304b650fcdf473e"} Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.418933 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-6gbp4" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.464447 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ff7f6" event={"ID":"ac9f205a-3d30-4ca3-b253-32c441466211","Type":"ContainerStarted","Data":"fab5dbe9a8a4228d69e9e4a51c4ff21617945e5561814002757cf83337fefb65"} Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.466247 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ff7f6" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.507367 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-2b24b" 
event={"ID":"e898f43a-2487-48f8-9615-f02fdbd9eb30","Type":"ContainerStarted","Data":"a9be96e2eb544dd4b9bc16dd2db604c26d6793ad214f3d37b4ec0ec557b20354"} Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.508217 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-2b24b" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.532775 4799 generic.go:334] "Generic (PLEG): container finished" podID="4e0327f8-1c8b-43c2-b462-1c60671d91d5" containerID="76f7d756d0cfa49dcc90ee4386468b6696f1b5bd432f1db26c2574510cf0ead0" exitCode=0 Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.533677 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djnlm" event={"ID":"4e0327f8-1c8b-43c2-b462-1c60671d91d5","Type":"ContainerDied","Data":"76f7d756d0cfa49dcc90ee4386468b6696f1b5bd432f1db26c2574510cf0ead0"} Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.584567 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4ttvx" podStartSLOduration=6.817348551 podStartE2EDuration="24.584537077s" podCreationTimestamp="2026-01-21 17:48:32 +0000 UTC" firstStartedPulling="2026-01-21 17:48:35.365210602 +0000 UTC m=+941.991500625" lastFinishedPulling="2026-01-21 17:48:53.132399138 +0000 UTC m=+959.758689151" observedRunningTime="2026-01-21 17:48:56.584487085 +0000 UTC m=+963.210777108" watchObservedRunningTime="2026-01-21 17:48:56.584537077 +0000 UTC m=+963.210827100" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.651584 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5" podStartSLOduration=4.4859743309999995 podStartE2EDuration="23.651551849s" podCreationTimestamp="2026-01-21 17:48:33 +0000 UTC" firstStartedPulling="2026-01-21 17:48:35.726090671 +0000 UTC m=+942.352380694" lastFinishedPulling="2026-01-21 17:48:54.891668189 +0000 UTC m=+961.517958212" observedRunningTime="2026-01-21 17:48:56.650488639 +0000 UTC m=+963.276778662" watchObservedRunningTime="2026-01-21 17:48:56.651551849 +0000 UTC m=+963.277841872" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.712994 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bz4tf" podStartSLOduration=5.743688011 podStartE2EDuration="23.712959383s" podCreationTimestamp="2026-01-21 17:48:33 +0000 UTC" firstStartedPulling="2026-01-21 17:48:35.177575574 +0000 UTC m=+941.803865597" lastFinishedPulling="2026-01-21 17:48:53.146846946 +0000 UTC m=+959.773136969" observedRunningTime="2026-01-21 17:48:56.686419803 +0000 UTC m=+963.312709826" watchObservedRunningTime="2026-01-21 17:48:56.712959383 +0000 UTC m=+963.339249406" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.718761 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-9f958b845-2qd6s" podStartSLOduration=6.283821087 podStartE2EDuration="24.718748656s" podCreationTimestamp="2026-01-21 17:48:32 +0000 UTC" firstStartedPulling="2026-01-21 17:48:34.697489309 +0000 UTC m=+941.323779332" lastFinishedPulling="2026-01-21 17:48:53.132416888 +0000 UTC m=+959.758706901" observedRunningTime="2026-01-21 17:48:56.712893471 +0000 UTC m=+963.339183494" watchObservedRunningTime="2026-01-21 17:48:56.718748656 +0000 UTC 
m=+963.345038679" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.773598 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-rw2zn" podStartSLOduration=6.85345166 podStartE2EDuration="24.773577504s" podCreationTimestamp="2026-01-21 17:48:32 +0000 UTC" firstStartedPulling="2026-01-21 17:48:35.21248126 +0000 UTC m=+941.838771283" lastFinishedPulling="2026-01-21 17:48:53.132607104 +0000 UTC m=+959.758897127" observedRunningTime="2026-01-21 17:48:56.770625221 +0000 UTC m=+963.396915244" watchObservedRunningTime="2026-01-21 17:48:56.773577504 +0000 UTC m=+963.399867527" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.828574 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-dqwbg" podStartSLOduration=4.4359752199999996 podStartE2EDuration="23.828553357s" podCreationTimestamp="2026-01-21 17:48:33 +0000 UTC" firstStartedPulling="2026-01-21 17:48:35.524846019 +0000 UTC m=+942.151136042" lastFinishedPulling="2026-01-21 17:48:54.917424156 +0000 UTC m=+961.543714179" observedRunningTime="2026-01-21 17:48:56.823537665 +0000 UTC m=+963.449827688" watchObservedRunningTime="2026-01-21 17:48:56.828553357 +0000 UTC m=+963.454843380" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.872572 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-2b24b" podStartSLOduration=5.933044658 podStartE2EDuration="23.872550579s" podCreationTimestamp="2026-01-21 17:48:33 +0000 UTC" firstStartedPulling="2026-01-21 17:48:35.192892147 +0000 UTC m=+941.819182170" lastFinishedPulling="2026-01-21 17:48:53.132398068 +0000 UTC m=+959.758688091" observedRunningTime="2026-01-21 17:48:56.865962003 +0000 UTC m=+963.492252036" watchObservedRunningTime="2026-01-21 17:48:56.872550579 +0000 UTC m=+963.498840602" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.883098 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ff7f6" podStartSLOduration=6.706923084 podStartE2EDuration="24.883065576s" podCreationTimestamp="2026-01-21 17:48:32 +0000 UTC" firstStartedPulling="2026-01-21 17:48:34.966721031 +0000 UTC m=+941.593011044" lastFinishedPulling="2026-01-21 17:48:53.142863513 +0000 UTC m=+959.769153536" observedRunningTime="2026-01-21 17:48:56.850058514 +0000 UTC m=+963.476348547" watchObservedRunningTime="2026-01-21 17:48:56.883065576 +0000 UTC m=+963.509355599" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.915930 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-6gbp4" podStartSLOduration=6.051114147 podStartE2EDuration="24.915908883s" podCreationTimestamp="2026-01-21 17:48:32 +0000 UTC" firstStartedPulling="2026-01-21 17:48:34.261985453 +0000 UTC m=+940.888275476" lastFinishedPulling="2026-01-21 17:48:53.126780189 +0000 UTC m=+959.753070212" observedRunningTime="2026-01-21 17:48:56.905938081 +0000 UTC m=+963.532228114" watchObservedRunningTime="2026-01-21 17:48:56.915908883 +0000 UTC m=+963.542198906" Jan 21 17:48:56 crc kubenswrapper[4799]: I0121 17:48:56.945021 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-rwglk" podStartSLOduration=6.15911026 
podStartE2EDuration="23.944994584s" podCreationTimestamp="2026-01-21 17:48:33 +0000 UTC" firstStartedPulling="2026-01-21 17:48:35.360675284 +0000 UTC m=+941.986965307" lastFinishedPulling="2026-01-21 17:48:53.146559608 +0000 UTC m=+959.772849631" observedRunningTime="2026-01-21 17:48:56.922639673 +0000 UTC m=+963.548929696" watchObservedRunningTime="2026-01-21 17:48:56.944994584 +0000 UTC m=+963.571284607" Jan 21 17:48:57 crc kubenswrapper[4799]: I0121 17:48:57.548657 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djnlm" event={"ID":"4e0327f8-1c8b-43c2-b462-1c60671d91d5","Type":"ContainerStarted","Data":"39ffa27cb648693d559a6437451ebc860bf465ea337a1065a6b3118cd08e3cdc"} Jan 21 17:48:57 crc kubenswrapper[4799]: I0121 17:48:57.549813 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-rw2zn" Jan 21 17:48:57 crc kubenswrapper[4799]: I0121 17:48:57.576504 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-djnlm" podStartSLOduration=4.212872595 podStartE2EDuration="25.576467113s" podCreationTimestamp="2026-01-21 17:48:32 +0000 UTC" firstStartedPulling="2026-01-21 17:48:35.846355277 +0000 UTC m=+942.472645300" lastFinishedPulling="2026-01-21 17:48:57.209949795 +0000 UTC m=+963.836239818" observedRunningTime="2026-01-21 17:48:57.56998302 +0000 UTC m=+964.196273043" watchObservedRunningTime="2026-01-21 17:48:57.576467113 +0000 UTC m=+964.202757126" Jan 21 17:49:02 crc kubenswrapper[4799]: I0121 17:49:02.382761 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:49:02 crc kubenswrapper[4799]: I0121 17:49:02.383326 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:49:02 crc kubenswrapper[4799]: I0121 17:49:02.437889 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:49:02 crc kubenswrapper[4799]: I0121 17:49:02.651027 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.133186 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-xgqnz" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.185754 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-9f958b845-2qd6s" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.194041 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-6gbp4" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.240912 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-c6994669c-msz6d" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.253929 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-djnlm"] Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.319537 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-tslfv" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.395851 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ff7f6" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.595881 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-65f5896948-jrzsz" event={"ID":"822a89c1-0086-49f1-9bee-6ac87a2af52a","Type":"ContainerStarted","Data":"fcc141cb911fab369af7e28cab29e82bc7a182bb7eb34e3eee4a4078d21712ec"} Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.596863 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-65f5896948-jrzsz" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.598473 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" event={"ID":"7654ac1c-746c-46e6-b276-e9f6a839a187","Type":"ContainerStarted","Data":"c6f81f1e7786cc82c77a62b87bd18ea36a7925daa7812beb3676000bc22832a9"} Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.598886 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.600362 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-7nxnh" event={"ID":"92a8c35c-6ef9-4453-9233-df8579764cd2","Type":"ContainerStarted","Data":"66b34c391117b2bdfe6e526b58d644bc3d729c3641c600c6ac723dee0c403d23"} Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.600742 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-7nxnh" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.603246 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" event={"ID":"4d7fd266-ebc9-46f2-9355-4dac2699822c","Type":"ContainerStarted","Data":"a77a21f92d30b155b0c55c23bb86411f41693cc0f6cd6c0e592d5d2cae47b96f"} Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.603283 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.632381 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-65f5896948-jrzsz" podStartSLOduration=3.3488265139999998 podStartE2EDuration="30.632359308s" podCreationTimestamp="2026-01-21 17:48:33 +0000 UTC" firstStartedPulling="2026-01-21 17:48:35.727631275 +0000 UTC m=+942.353921298" lastFinishedPulling="2026-01-21 17:49:03.011164069 +0000 UTC m=+969.637454092" observedRunningTime="2026-01-21 17:49:03.630495005 +0000 UTC m=+970.256785028" watchObservedRunningTime="2026-01-21 17:49:03.632359308 +0000 UTC m=+970.258649331" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.647966 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-rw2zn" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.661732 4799 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" podStartSLOduration=22.618445106 podStartE2EDuration="31.661709757s" podCreationTimestamp="2026-01-21 17:48:32 +0000 UTC" firstStartedPulling="2026-01-21 17:48:53.96126236 +0000 UTC m=+960.587552383" lastFinishedPulling="2026-01-21 17:49:03.004527011 +0000 UTC m=+969.630817034" observedRunningTime="2026-01-21 17:49:03.660452861 +0000 UTC m=+970.286742874" watchObservedRunningTime="2026-01-21 17:49:03.661709757 +0000 UTC m=+970.287999780" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.688398 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-7nxnh" podStartSLOduration=3.004891362 podStartE2EDuration="30.688376659s" podCreationTimestamp="2026-01-21 17:48:33 +0000 UTC" firstStartedPulling="2026-01-21 17:48:35.364411999 +0000 UTC m=+941.990702022" lastFinishedPulling="2026-01-21 17:49:03.047897296 +0000 UTC m=+969.674187319" observedRunningTime="2026-01-21 17:49:03.688060471 +0000 UTC m=+970.314350504" watchObservedRunningTime="2026-01-21 17:49:03.688376659 +0000 UTC m=+970.314666672" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.747766 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" podStartSLOduration=22.479836385 podStartE2EDuration="30.747736395s" podCreationTimestamp="2026-01-21 17:48:33 +0000 UTC" firstStartedPulling="2026-01-21 17:48:54.753860178 +0000 UTC m=+961.380150201" lastFinishedPulling="2026-01-21 17:49:03.021760188 +0000 UTC m=+969.648050211" observedRunningTime="2026-01-21 17:49:03.747118808 +0000 UTC m=+970.373408841" watchObservedRunningTime="2026-01-21 17:49:03.747736395 +0000 UTC m=+970.374026418" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.801896 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4ttvx" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.814702 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bz4tf" Jan 21 17:49:03 crc kubenswrapper[4799]: I0121 17:49:03.835867 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-rwglk" Jan 21 17:49:04 crc kubenswrapper[4799]: I0121 17:49:04.139188 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-2b24b" Jan 21 17:49:04 crc kubenswrapper[4799]: I0121 17:49:04.307709 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-65849867d6-v552f" Jan 21 17:49:04 crc kubenswrapper[4799]: I0121 17:49:04.347185 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9wst2" Jan 21 17:49:04 crc kubenswrapper[4799]: I0121 17:49:04.572943 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-dqwbg" Jan 21 17:49:04 crc kubenswrapper[4799]: I0121 17:49:04.578027 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-n2lq2" Jan 21 17:49:04 crc kubenswrapper[4799]: I0121 17:49:04.612481 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-djnlm" podUID="4e0327f8-1c8b-43c2-b462-1c60671d91d5" containerName="registry-server" containerID="cri-o://39ffa27cb648693d559a6437451ebc860bf465ea337a1065a6b3118cd08e3cdc" gracePeriod=2 Jan 21 17:49:04 crc kubenswrapper[4799]: I0121 17:49:04.875066 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-v4wk5" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.063166 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.125479 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e0327f8-1c8b-43c2-b462-1c60671d91d5-utilities\") pod \"4e0327f8-1c8b-43c2-b462-1c60671d91d5\" (UID: \"4e0327f8-1c8b-43c2-b462-1c60671d91d5\") " Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.125777 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rqpg\" (UniqueName: \"kubernetes.io/projected/4e0327f8-1c8b-43c2-b462-1c60671d91d5-kube-api-access-6rqpg\") pod \"4e0327f8-1c8b-43c2-b462-1c60671d91d5\" (UID: \"4e0327f8-1c8b-43c2-b462-1c60671d91d5\") " Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.126733 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e0327f8-1c8b-43c2-b462-1c60671d91d5-utilities" (OuterVolumeSpecName: "utilities") pod "4e0327f8-1c8b-43c2-b462-1c60671d91d5" (UID: "4e0327f8-1c8b-43c2-b462-1c60671d91d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.130184 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e0327f8-1c8b-43c2-b462-1c60671d91d5-catalog-content\") pod \"4e0327f8-1c8b-43c2-b462-1c60671d91d5\" (UID: \"4e0327f8-1c8b-43c2-b462-1c60671d91d5\") " Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.130983 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e0327f8-1c8b-43c2-b462-1c60671d91d5-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.135783 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e0327f8-1c8b-43c2-b462-1c60671d91d5-kube-api-access-6rqpg" (OuterVolumeSpecName: "kube-api-access-6rqpg") pod "4e0327f8-1c8b-43c2-b462-1c60671d91d5" (UID: "4e0327f8-1c8b-43c2-b462-1c60671d91d5"). InnerVolumeSpecName "kube-api-access-6rqpg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.187074 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e0327f8-1c8b-43c2-b462-1c60671d91d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4e0327f8-1c8b-43c2-b462-1c60671d91d5" (UID: "4e0327f8-1c8b-43c2-b462-1c60671d91d5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.233167 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e0327f8-1c8b-43c2-b462-1c60671d91d5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.233245 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rqpg\" (UniqueName: \"kubernetes.io/projected/4e0327f8-1c8b-43c2-b462-1c60671d91d5-kube-api-access-6rqpg\") on node \"crc\" DevicePath \"\"" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.621300 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-75r9k" event={"ID":"3a0e1cc6-500f-4493-8a18-0eeea206a4f7","Type":"ContainerStarted","Data":"14560398442af09db3a9706b564c5ee59f602913610fb6573358db90472bd2a7"} Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.622371 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-75r9k" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.625671 4799 generic.go:334] "Generic (PLEG): container finished" podID="4e0327f8-1c8b-43c2-b462-1c60671d91d5" containerID="39ffa27cb648693d559a6437451ebc860bf465ea337a1065a6b3118cd08e3cdc" exitCode=0 Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.625708 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djnlm" event={"ID":"4e0327f8-1c8b-43c2-b462-1c60671d91d5","Type":"ContainerDied","Data":"39ffa27cb648693d559a6437451ebc860bf465ea337a1065a6b3118cd08e3cdc"} Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.625754 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djnlm" event={"ID":"4e0327f8-1c8b-43c2-b462-1c60671d91d5","Type":"ContainerDied","Data":"5dc21a9dc23f6f4a3e37825a560c71b48eeb755c26fb30a0a76142d13b757c98"} Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.625777 4799 scope.go:117] "RemoveContainer" containerID="39ffa27cb648693d559a6437451ebc860bf465ea337a1065a6b3118cd08e3cdc" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.625778 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-djnlm" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.643887 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-75r9k" podStartSLOduration=4.198416087 podStartE2EDuration="33.643864651s" podCreationTimestamp="2026-01-21 17:48:32 +0000 UTC" firstStartedPulling="2026-01-21 17:48:35.175448174 +0000 UTC m=+941.801738197" lastFinishedPulling="2026-01-21 17:49:04.620896738 +0000 UTC m=+971.247186761" observedRunningTime="2026-01-21 17:49:05.639733844 +0000 UTC m=+972.266023877" watchObservedRunningTime="2026-01-21 17:49:05.643864651 +0000 UTC m=+972.270154674" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.653305 4799 scope.go:117] "RemoveContainer" containerID="76f7d756d0cfa49dcc90ee4386468b6696f1b5bd432f1db26c2574510cf0ead0" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.666711 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-djnlm"] Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.669982 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-djnlm"] Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.690447 4799 scope.go:117] "RemoveContainer" containerID="842e2d1c2436161e5d9a203cab531bbdec5714a8c6bcc839194c36da8d7e98b0" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.717042 4799 scope.go:117] "RemoveContainer" containerID="39ffa27cb648693d559a6437451ebc860bf465ea337a1065a6b3118cd08e3cdc" Jan 21 17:49:05 crc kubenswrapper[4799]: E0121 17:49:05.717681 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39ffa27cb648693d559a6437451ebc860bf465ea337a1065a6b3118cd08e3cdc\": container with ID starting with 39ffa27cb648693d559a6437451ebc860bf465ea337a1065a6b3118cd08e3cdc not found: ID does not exist" containerID="39ffa27cb648693d559a6437451ebc860bf465ea337a1065a6b3118cd08e3cdc" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.717738 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39ffa27cb648693d559a6437451ebc860bf465ea337a1065a6b3118cd08e3cdc"} err="failed to get container status \"39ffa27cb648693d559a6437451ebc860bf465ea337a1065a6b3118cd08e3cdc\": rpc error: code = NotFound desc = could not find container \"39ffa27cb648693d559a6437451ebc860bf465ea337a1065a6b3118cd08e3cdc\": container with ID starting with 39ffa27cb648693d559a6437451ebc860bf465ea337a1065a6b3118cd08e3cdc not found: ID does not exist" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.717778 4799 scope.go:117] "RemoveContainer" containerID="76f7d756d0cfa49dcc90ee4386468b6696f1b5bd432f1db26c2574510cf0ead0" Jan 21 17:49:05 crc kubenswrapper[4799]: E0121 17:49:05.718186 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76f7d756d0cfa49dcc90ee4386468b6696f1b5bd432f1db26c2574510cf0ead0\": container with ID starting with 76f7d756d0cfa49dcc90ee4386468b6696f1b5bd432f1db26c2574510cf0ead0 not found: ID does not exist" containerID="76f7d756d0cfa49dcc90ee4386468b6696f1b5bd432f1db26c2574510cf0ead0" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.718235 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76f7d756d0cfa49dcc90ee4386468b6696f1b5bd432f1db26c2574510cf0ead0"} 
err="failed to get container status \"76f7d756d0cfa49dcc90ee4386468b6696f1b5bd432f1db26c2574510cf0ead0\": rpc error: code = NotFound desc = could not find container \"76f7d756d0cfa49dcc90ee4386468b6696f1b5bd432f1db26c2574510cf0ead0\": container with ID starting with 76f7d756d0cfa49dcc90ee4386468b6696f1b5bd432f1db26c2574510cf0ead0 not found: ID does not exist" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.718265 4799 scope.go:117] "RemoveContainer" containerID="842e2d1c2436161e5d9a203cab531bbdec5714a8c6bcc839194c36da8d7e98b0" Jan 21 17:49:05 crc kubenswrapper[4799]: E0121 17:49:05.718562 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"842e2d1c2436161e5d9a203cab531bbdec5714a8c6bcc839194c36da8d7e98b0\": container with ID starting with 842e2d1c2436161e5d9a203cab531bbdec5714a8c6bcc839194c36da8d7e98b0 not found: ID does not exist" containerID="842e2d1c2436161e5d9a203cab531bbdec5714a8c6bcc839194c36da8d7e98b0" Jan 21 17:49:05 crc kubenswrapper[4799]: I0121 17:49:05.718596 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"842e2d1c2436161e5d9a203cab531bbdec5714a8c6bcc839194c36da8d7e98b0"} err="failed to get container status \"842e2d1c2436161e5d9a203cab531bbdec5714a8c6bcc839194c36da8d7e98b0\": rpc error: code = NotFound desc = could not find container \"842e2d1c2436161e5d9a203cab531bbdec5714a8c6bcc839194c36da8d7e98b0\": container with ID starting with 842e2d1c2436161e5d9a203cab531bbdec5714a8c6bcc839194c36da8d7e98b0 not found: ID does not exist" Jan 21 17:49:06 crc kubenswrapper[4799]: I0121 17:49:06.214740 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e0327f8-1c8b-43c2-b462-1c60671d91d5" path="/var/lib/kubelet/pods/4e0327f8-1c8b-43c2-b462-1c60671d91d5/volumes" Jan 21 17:49:06 crc kubenswrapper[4799]: I0121 17:49:06.349868 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:49:06 crc kubenswrapper[4799]: I0121 17:49:06.349953 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:49:06 crc kubenswrapper[4799]: I0121 17:49:06.356480 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-webhook-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:49:06 crc kubenswrapper[4799]: I0121 17:49:06.357917 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f0bcc23c-7399-4a1f-a91b-f643eaee6e60-metrics-certs\") pod \"openstack-operator-controller-manager-7ffc46955b-5t4q2\" (UID: \"f0bcc23c-7399-4a1f-a91b-f643eaee6e60\") " 
pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:49:06 crc kubenswrapper[4799]: I0121 17:49:06.430627 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-sxnxn" Jan 21 17:49:06 crc kubenswrapper[4799]: I0121 17:49:06.438892 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:49:06 crc kubenswrapper[4799]: I0121 17:49:06.907379 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2"] Jan 21 17:49:07 crc kubenswrapper[4799]: I0121 17:49:07.643177 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" event={"ID":"f0bcc23c-7399-4a1f-a91b-f643eaee6e60","Type":"ContainerStarted","Data":"73ad1f36db65f3df105e6f82cf4be249bab6129b8f128fb5635c790ba2aa27a5"} Jan 21 17:49:09 crc kubenswrapper[4799]: I0121 17:49:09.046347 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-ffgnr" Jan 21 17:49:09 crc kubenswrapper[4799]: I0121 17:49:09.977507 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854nht6t" Jan 21 17:49:13 crc kubenswrapper[4799]: I0121 17:49:13.806427 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-75r9k" Jan 21 17:49:13 crc kubenswrapper[4799]: I0121 17:49:13.993339 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dp7kp"] Jan 21 17:49:13 crc kubenswrapper[4799]: E0121 17:49:13.993729 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f25be100-1ae0-4419-b2b3-82140d80878e" containerName="extract-content" Jan 21 17:49:13 crc kubenswrapper[4799]: I0121 17:49:13.993767 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f25be100-1ae0-4419-b2b3-82140d80878e" containerName="extract-content" Jan 21 17:49:13 crc kubenswrapper[4799]: E0121 17:49:13.993778 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e0327f8-1c8b-43c2-b462-1c60671d91d5" containerName="extract-content" Jan 21 17:49:13 crc kubenswrapper[4799]: I0121 17:49:13.993784 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e0327f8-1c8b-43c2-b462-1c60671d91d5" containerName="extract-content" Jan 21 17:49:13 crc kubenswrapper[4799]: E0121 17:49:13.993806 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e0327f8-1c8b-43c2-b462-1c60671d91d5" containerName="extract-utilities" Jan 21 17:49:13 crc kubenswrapper[4799]: I0121 17:49:13.993813 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e0327f8-1c8b-43c2-b462-1c60671d91d5" containerName="extract-utilities" Jan 21 17:49:13 crc kubenswrapper[4799]: E0121 17:49:13.993821 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f25be100-1ae0-4419-b2b3-82140d80878e" containerName="extract-utilities" Jan 21 17:49:13 crc kubenswrapper[4799]: I0121 17:49:13.993827 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f25be100-1ae0-4419-b2b3-82140d80878e" containerName="extract-utilities" Jan 21 17:49:13 crc kubenswrapper[4799]: E0121 17:49:13.993839 4799 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f25be100-1ae0-4419-b2b3-82140d80878e" containerName="registry-server" Jan 21 17:49:13 crc kubenswrapper[4799]: I0121 17:49:13.993845 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f25be100-1ae0-4419-b2b3-82140d80878e" containerName="registry-server" Jan 21 17:49:13 crc kubenswrapper[4799]: E0121 17:49:13.993856 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e0327f8-1c8b-43c2-b462-1c60671d91d5" containerName="registry-server" Jan 21 17:49:13 crc kubenswrapper[4799]: I0121 17:49:13.993864 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e0327f8-1c8b-43c2-b462-1c60671d91d5" containerName="registry-server" Jan 21 17:49:13 crc kubenswrapper[4799]: I0121 17:49:13.994021 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f25be100-1ae0-4419-b2b3-82140d80878e" containerName="registry-server" Jan 21 17:49:13 crc kubenswrapper[4799]: I0121 17:49:13.994032 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e0327f8-1c8b-43c2-b462-1c60671d91d5" containerName="registry-server" Jan 21 17:49:13 crc kubenswrapper[4799]: I0121 17:49:13.995318 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dp7kp" Jan 21 17:49:14 crc kubenswrapper[4799]: I0121 17:49:14.017078 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dp7kp"] Jan 21 17:49:14 crc kubenswrapper[4799]: I0121 17:49:14.075748 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b27cd46-94cb-4007-817a-cdff3f683134-catalog-content\") pod \"community-operators-dp7kp\" (UID: \"2b27cd46-94cb-4007-817a-cdff3f683134\") " pod="openshift-marketplace/community-operators-dp7kp" Jan 21 17:49:14 crc kubenswrapper[4799]: I0121 17:49:14.076104 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22wnj\" (UniqueName: \"kubernetes.io/projected/2b27cd46-94cb-4007-817a-cdff3f683134-kube-api-access-22wnj\") pod \"community-operators-dp7kp\" (UID: \"2b27cd46-94cb-4007-817a-cdff3f683134\") " pod="openshift-marketplace/community-operators-dp7kp" Jan 21 17:49:14 crc kubenswrapper[4799]: I0121 17:49:14.076213 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b27cd46-94cb-4007-817a-cdff3f683134-utilities\") pod \"community-operators-dp7kp\" (UID: \"2b27cd46-94cb-4007-817a-cdff3f683134\") " pod="openshift-marketplace/community-operators-dp7kp" Jan 21 17:49:14 crc kubenswrapper[4799]: I0121 17:49:14.178170 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b27cd46-94cb-4007-817a-cdff3f683134-catalog-content\") pod \"community-operators-dp7kp\" (UID: \"2b27cd46-94cb-4007-817a-cdff3f683134\") " pod="openshift-marketplace/community-operators-dp7kp" Jan 21 17:49:14 crc kubenswrapper[4799]: I0121 17:49:14.178233 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22wnj\" (UniqueName: \"kubernetes.io/projected/2b27cd46-94cb-4007-817a-cdff3f683134-kube-api-access-22wnj\") pod \"community-operators-dp7kp\" (UID: \"2b27cd46-94cb-4007-817a-cdff3f683134\") " pod="openshift-marketplace/community-operators-dp7kp" 
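The metrics-certs and webhook-certs mounts for openstack-operator-controller-manager-7ffc46955b-5t4q2 failed at 17:48:50 because the "metrics-server-cert" and "webhook-server-cert" secrets did not exist yet, and nestedpendingoperations scheduled the retry 16 seconds out ("durationBeforeRetry 16s", "No retries permitted until ... 17:49:06"). By the time the 17:49:06 retry ran, the secrets were present and both MountVolume.SetUp calls succeeded (the "SetUp succeeded" entries above). The 16s figure is consistent with a doubling backoff (0.5s, 1s, 2s, 4s, 8s, 16s, ...). Below is a minimal Go sketch of that doubling pattern, assuming an initial 500ms delay and a two-minute cap; these constants and the backoff type are illustrative, not kubelet's actual nestedpendingoperations code.

package main

import (
	"fmt"
	"time"
)

// backoff doubles its delay after every failure, up to a cap -- the same
// shape as the durationBeforeRetry values kubelet logs for a volume
// operation that keeps failing.
type backoff struct {
	delay time.Duration
	max   time.Duration
}

func (b *backoff) next() time.Duration {
	if b.delay == 0 {
		b.delay = 500 * time.Millisecond // assumed initial delay
	} else if b.delay < b.max {
		b.delay *= 2
		if b.delay > b.max {
			b.delay = b.max
		}
	}
	return b.delay
}

func main() {
	b := &backoff{max: 2 * time.Minute}
	for i := 1; i <= 6; i++ {
		fmt.Printf("failure %d: retry in %v\n", i, b.next())
	}
	// failure 6 prints "retry in 16s", matching the delay logged above.
}

The point of the cap is that a persistently missing secret never pushes retries out indefinitely; once the secret appears, the next scheduled retry mounts it without any further intervention, which is exactly the recovery visible in this log.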
Jan 21 17:49:14 crc kubenswrapper[4799]: I0121 17:49:14.178297 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b27cd46-94cb-4007-817a-cdff3f683134-utilities\") pod \"community-operators-dp7kp\" (UID: \"2b27cd46-94cb-4007-817a-cdff3f683134\") " pod="openshift-marketplace/community-operators-dp7kp" Jan 21 17:49:14 crc kubenswrapper[4799]: I0121 17:49:14.178942 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b27cd46-94cb-4007-817a-cdff3f683134-catalog-content\") pod \"community-operators-dp7kp\" (UID: \"2b27cd46-94cb-4007-817a-cdff3f683134\") " pod="openshift-marketplace/community-operators-dp7kp" Jan 21 17:49:14 crc kubenswrapper[4799]: I0121 17:49:14.178955 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b27cd46-94cb-4007-817a-cdff3f683134-utilities\") pod \"community-operators-dp7kp\" (UID: \"2b27cd46-94cb-4007-817a-cdff3f683134\") " pod="openshift-marketplace/community-operators-dp7kp" Jan 21 17:49:14 crc kubenswrapper[4799]: I0121 17:49:14.197619 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22wnj\" (UniqueName: \"kubernetes.io/projected/2b27cd46-94cb-4007-817a-cdff3f683134-kube-api-access-22wnj\") pod \"community-operators-dp7kp\" (UID: \"2b27cd46-94cb-4007-817a-cdff3f683134\") " pod="openshift-marketplace/community-operators-dp7kp" Jan 21 17:49:14 crc kubenswrapper[4799]: I0121 17:49:14.314666 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dp7kp" Jan 21 17:49:14 crc kubenswrapper[4799]: I0121 17:49:14.394831 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-7nxnh" Jan 21 17:49:14 crc kubenswrapper[4799]: I0121 17:49:14.583621 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-65f5896948-jrzsz" Jan 21 17:49:14 crc kubenswrapper[4799]: I0121 17:49:14.718194 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" event={"ID":"f0bcc23c-7399-4a1f-a91b-f643eaee6e60","Type":"ContainerStarted","Data":"b3c5d3aba645609dc048c306df4249ef4982005f4d0ec942f7229aabbf054ae3"} Jan 21 17:49:14 crc kubenswrapper[4799]: I0121 17:49:14.832782 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dp7kp"] Jan 21 17:49:14 crc kubenswrapper[4799]: W0121 17:49:14.838139 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b27cd46_94cb_4007_817a_cdff3f683134.slice/crio-20b6d9d6caeb87c938280ce7ed6602174e7f4010642923519c093adef3b8b7e1 WatchSource:0}: Error finding container 20b6d9d6caeb87c938280ce7ed6602174e7f4010642923519c093adef3b8b7e1: Status 404 returned error can't find the container with id 20b6d9d6caeb87c938280ce7ed6602174e7f4010642923519c093adef3b8b7e1 Jan 21 17:49:15 crc kubenswrapper[4799]: I0121 17:49:15.726674 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dp7kp" 
event={"ID":"2b27cd46-94cb-4007-817a-cdff3f683134","Type":"ContainerStarted","Data":"20b6d9d6caeb87c938280ce7ed6602174e7f4010642923519c093adef3b8b7e1"} Jan 21 17:49:15 crc kubenswrapper[4799]: I0121 17:49:15.727052 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:49:15 crc kubenswrapper[4799]: I0121 17:49:15.751420 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" podStartSLOduration=42.751401221 podStartE2EDuration="42.751401221s" podCreationTimestamp="2026-01-21 17:48:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:49:15.747409318 +0000 UTC m=+982.373699341" watchObservedRunningTime="2026-01-21 17:49:15.751401221 +0000 UTC m=+982.377691244" Jan 21 17:49:16 crc kubenswrapper[4799]: I0121 17:49:16.736408 4799 generic.go:334] "Generic (PLEG): container finished" podID="2b27cd46-94cb-4007-817a-cdff3f683134" containerID="580488b09ae2a48a5ca393e1dbe1ddd870456e27b3257a3e1b4d12f63c3c2425" exitCode=0 Jan 21 17:49:16 crc kubenswrapper[4799]: I0121 17:49:16.736645 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dp7kp" event={"ID":"2b27cd46-94cb-4007-817a-cdff3f683134","Type":"ContainerDied","Data":"580488b09ae2a48a5ca393e1dbe1ddd870456e27b3257a3e1b4d12f63c3c2425"} Jan 21 17:49:18 crc kubenswrapper[4799]: I0121 17:49:18.756231 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hnnwf" event={"ID":"3e5a60e4-5801-4273-a08a-20907c8bed09","Type":"ContainerStarted","Data":"47cfe3c9d998b35e043e99e54c5a6b984c182beca24cbd77f43c53fa5c4156a5"} Jan 21 17:49:18 crc kubenswrapper[4799]: I0121 17:49:18.781187 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hnnwf" podStartSLOduration=3.244433377 podStartE2EDuration="45.781161455s" podCreationTimestamp="2026-01-21 17:48:33 +0000 UTC" firstStartedPulling="2026-01-21 17:48:35.723834948 +0000 UTC m=+942.350124971" lastFinishedPulling="2026-01-21 17:49:18.260563016 +0000 UTC m=+984.886853049" observedRunningTime="2026-01-21 17:49:18.774868297 +0000 UTC m=+985.401158320" watchObservedRunningTime="2026-01-21 17:49:18.781161455 +0000 UTC m=+985.407451478" Jan 21 17:49:19 crc kubenswrapper[4799]: I0121 17:49:19.768751 4799 generic.go:334] "Generic (PLEG): container finished" podID="2b27cd46-94cb-4007-817a-cdff3f683134" containerID="94fc90f725c73abdf4379c04b830f6987156c41d2f77ddd9871c1f7a69301fa2" exitCode=0 Jan 21 17:49:19 crc kubenswrapper[4799]: I0121 17:49:19.768856 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dp7kp" event={"ID":"2b27cd46-94cb-4007-817a-cdff3f683134","Type":"ContainerDied","Data":"94fc90f725c73abdf4379c04b830f6987156c41d2f77ddd9871c1f7a69301fa2"} Jan 21 17:49:20 crc kubenswrapper[4799]: I0121 17:49:20.779169 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dp7kp" event={"ID":"2b27cd46-94cb-4007-817a-cdff3f683134","Type":"ContainerStarted","Data":"5bcbc037af8b6ac20a20290cee5a00263df9bd7d29b68efcc12c82642ebf4031"} Jan 21 17:49:20 crc kubenswrapper[4799]: I0121 17:49:20.796710 4799 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openshift-marketplace/community-operators-dp7kp" podStartSLOduration=5.394132808 podStartE2EDuration="7.796687542s" podCreationTimestamp="2026-01-21 17:49:13 +0000 UTC" firstStartedPulling="2026-01-21 17:49:17.748270052 +0000 UTC m=+984.374560075" lastFinishedPulling="2026-01-21 17:49:20.150824796 +0000 UTC m=+986.777114809" observedRunningTime="2026-01-21 17:49:20.796027533 +0000 UTC m=+987.422317546" watchObservedRunningTime="2026-01-21 17:49:20.796687542 +0000 UTC m=+987.422977565" Jan 21 17:49:24 crc kubenswrapper[4799]: I0121 17:49:24.315293 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dp7kp" Jan 21 17:49:24 crc kubenswrapper[4799]: I0121 17:49:24.315632 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dp7kp" Jan 21 17:49:24 crc kubenswrapper[4799]: I0121 17:49:24.363265 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dp7kp" Jan 21 17:49:26 crc kubenswrapper[4799]: I0121 17:49:26.447833 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7ffc46955b-5t4q2" Jan 21 17:49:34 crc kubenswrapper[4799]: I0121 17:49:34.362863 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dp7kp" Jan 21 17:49:34 crc kubenswrapper[4799]: I0121 17:49:34.414227 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dp7kp"] Jan 21 17:49:34 crc kubenswrapper[4799]: I0121 17:49:34.891253 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dp7kp" podUID="2b27cd46-94cb-4007-817a-cdff3f683134" containerName="registry-server" containerID="cri-o://5bcbc037af8b6ac20a20290cee5a00263df9bd7d29b68efcc12c82642ebf4031" gracePeriod=2 Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.843317 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dp7kp" Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.899512 4799 generic.go:334] "Generic (PLEG): container finished" podID="2b27cd46-94cb-4007-817a-cdff3f683134" containerID="5bcbc037af8b6ac20a20290cee5a00263df9bd7d29b68efcc12c82642ebf4031" exitCode=0 Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.899568 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dp7kp" event={"ID":"2b27cd46-94cb-4007-817a-cdff3f683134","Type":"ContainerDied","Data":"5bcbc037af8b6ac20a20290cee5a00263df9bd7d29b68efcc12c82642ebf4031"} Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.899605 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dp7kp" event={"ID":"2b27cd46-94cb-4007-817a-cdff3f683134","Type":"ContainerDied","Data":"20b6d9d6caeb87c938280ce7ed6602174e7f4010642923519c093adef3b8b7e1"} Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.899628 4799 scope.go:117] "RemoveContainer" containerID="5bcbc037af8b6ac20a20290cee5a00263df9bd7d29b68efcc12c82642ebf4031" Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.899895 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dp7kp" Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.918215 4799 scope.go:117] "RemoveContainer" containerID="94fc90f725c73abdf4379c04b830f6987156c41d2f77ddd9871c1f7a69301fa2" Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.941365 4799 scope.go:117] "RemoveContainer" containerID="580488b09ae2a48a5ca393e1dbe1ddd870456e27b3257a3e1b4d12f63c3c2425" Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.965211 4799 scope.go:117] "RemoveContainer" containerID="5bcbc037af8b6ac20a20290cee5a00263df9bd7d29b68efcc12c82642ebf4031" Jan 21 17:49:35 crc kubenswrapper[4799]: E0121 17:49:35.966279 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bcbc037af8b6ac20a20290cee5a00263df9bd7d29b68efcc12c82642ebf4031\": container with ID starting with 5bcbc037af8b6ac20a20290cee5a00263df9bd7d29b68efcc12c82642ebf4031 not found: ID does not exist" containerID="5bcbc037af8b6ac20a20290cee5a00263df9bd7d29b68efcc12c82642ebf4031" Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.966350 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bcbc037af8b6ac20a20290cee5a00263df9bd7d29b68efcc12c82642ebf4031"} err="failed to get container status \"5bcbc037af8b6ac20a20290cee5a00263df9bd7d29b68efcc12c82642ebf4031\": rpc error: code = NotFound desc = could not find container \"5bcbc037af8b6ac20a20290cee5a00263df9bd7d29b68efcc12c82642ebf4031\": container with ID starting with 5bcbc037af8b6ac20a20290cee5a00263df9bd7d29b68efcc12c82642ebf4031 not found: ID does not exist" Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.966427 4799 scope.go:117] "RemoveContainer" containerID="94fc90f725c73abdf4379c04b830f6987156c41d2f77ddd9871c1f7a69301fa2" Jan 21 17:49:35 crc kubenswrapper[4799]: E0121 17:49:35.967313 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94fc90f725c73abdf4379c04b830f6987156c41d2f77ddd9871c1f7a69301fa2\": container with ID starting with 94fc90f725c73abdf4379c04b830f6987156c41d2f77ddd9871c1f7a69301fa2 not found: ID does not exist" containerID="94fc90f725c73abdf4379c04b830f6987156c41d2f77ddd9871c1f7a69301fa2" Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.967346 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94fc90f725c73abdf4379c04b830f6987156c41d2f77ddd9871c1f7a69301fa2"} err="failed to get container status \"94fc90f725c73abdf4379c04b830f6987156c41d2f77ddd9871c1f7a69301fa2\": rpc error: code = NotFound desc = could not find container \"94fc90f725c73abdf4379c04b830f6987156c41d2f77ddd9871c1f7a69301fa2\": container with ID starting with 94fc90f725c73abdf4379c04b830f6987156c41d2f77ddd9871c1f7a69301fa2 not found: ID does not exist" Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.967369 4799 scope.go:117] "RemoveContainer" containerID="580488b09ae2a48a5ca393e1dbe1ddd870456e27b3257a3e1b4d12f63c3c2425" Jan 21 17:49:35 crc kubenswrapper[4799]: E0121 17:49:35.967747 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"580488b09ae2a48a5ca393e1dbe1ddd870456e27b3257a3e1b4d12f63c3c2425\": container with ID starting with 580488b09ae2a48a5ca393e1dbe1ddd870456e27b3257a3e1b4d12f63c3c2425 not found: ID does not exist" containerID="580488b09ae2a48a5ca393e1dbe1ddd870456e27b3257a3e1b4d12f63c3c2425" 
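[editor's note] The "Observed pod startup duration" entries above differ in one telling way: podStartSLOduration equals podStartE2EDuration when no image pull was observed (the zero "0001-01-01" pull timestamps), but drops to ~3.2s for the rabbitmq-cluster-operator pod because its ~42.5s image-pull window is excluded from the SLO figure. A minimal sketch of that arithmetic, using values copied from the entries above; this is not the kubelet's own code, which works from the monotonic m=+ offsets, so the last decimals differ slightly (3.244433387 here vs. the logged 3.244433377):

	package main

	import (
		"fmt"
		"time"
	)

	func main() {
		// Timestamps from the rabbitmq-cluster-operator-manager entry,
		// rewritten in RFC 3339 form for time.Parse.
		created, _ := time.Parse(time.RFC3339Nano, "2026-01-21T17:48:33Z")
		firstPull, _ := time.Parse(time.RFC3339Nano, "2026-01-21T17:48:35.723834948Z")
		lastPull, _ := time.Parse(time.RFC3339Nano, "2026-01-21T17:49:18.260563016Z")
		observed, _ := time.Parse(time.RFC3339Nano, "2026-01-21T17:49:18.781161455Z")

		e2e := observed.Sub(created)    // podStartE2EDuration: 45.781161455s
		pull := lastPull.Sub(firstPull) // image-pull window: ~42.536728s
		slo := e2e - pull               // podStartSLOduration: ~3.244433s
		fmt.Println(e2e, pull, slo)

		// Zero-valued pull timestamps mean no pull was observed, in which
		// case the SLO and E2E durations coincide, as in the
		// openstack-operator-controller-manager entry above.
	}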
Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.967780 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"580488b09ae2a48a5ca393e1dbe1ddd870456e27b3257a3e1b4d12f63c3c2425"} err="failed to get container status \"580488b09ae2a48a5ca393e1dbe1ddd870456e27b3257a3e1b4d12f63c3c2425\": rpc error: code = NotFound desc = could not find container \"580488b09ae2a48a5ca393e1dbe1ddd870456e27b3257a3e1b4d12f63c3c2425\": container with ID starting with 580488b09ae2a48a5ca393e1dbe1ddd870456e27b3257a3e1b4d12f63c3c2425 not found: ID does not exist"
Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.968775 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b27cd46-94cb-4007-817a-cdff3f683134-utilities\") pod \"2b27cd46-94cb-4007-817a-cdff3f683134\" (UID: \"2b27cd46-94cb-4007-817a-cdff3f683134\") "
Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.968888 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22wnj\" (UniqueName: \"kubernetes.io/projected/2b27cd46-94cb-4007-817a-cdff3f683134-kube-api-access-22wnj\") pod \"2b27cd46-94cb-4007-817a-cdff3f683134\" (UID: \"2b27cd46-94cb-4007-817a-cdff3f683134\") "
Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.968922 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b27cd46-94cb-4007-817a-cdff3f683134-catalog-content\") pod \"2b27cd46-94cb-4007-817a-cdff3f683134\" (UID: \"2b27cd46-94cb-4007-817a-cdff3f683134\") "
Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.970359 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b27cd46-94cb-4007-817a-cdff3f683134-utilities" (OuterVolumeSpecName: "utilities") pod "2b27cd46-94cb-4007-817a-cdff3f683134" (UID: "2b27cd46-94cb-4007-817a-cdff3f683134"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 17:49:35 crc kubenswrapper[4799]: I0121 17:49:35.974181 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b27cd46-94cb-4007-817a-cdff3f683134-kube-api-access-22wnj" (OuterVolumeSpecName: "kube-api-access-22wnj") pod "2b27cd46-94cb-4007-817a-cdff3f683134" (UID: "2b27cd46-94cb-4007-817a-cdff3f683134"). InnerVolumeSpecName "kube-api-access-22wnj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:49:36 crc kubenswrapper[4799]: I0121 17:49:36.015766 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b27cd46-94cb-4007-817a-cdff3f683134-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2b27cd46-94cb-4007-817a-cdff3f683134" (UID: "2b27cd46-94cb-4007-817a-cdff3f683134"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 17:49:36 crc kubenswrapper[4799]: I0121 17:49:36.070646 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b27cd46-94cb-4007-817a-cdff3f683134-utilities\") on node \"crc\" DevicePath \"\""
Jan 21 17:49:36 crc kubenswrapper[4799]: I0121 17:49:36.070687 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22wnj\" (UniqueName: \"kubernetes.io/projected/2b27cd46-94cb-4007-817a-cdff3f683134-kube-api-access-22wnj\") on node \"crc\" DevicePath \"\""
Jan 21 17:49:36 crc kubenswrapper[4799]: I0121 17:49:36.070699 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b27cd46-94cb-4007-817a-cdff3f683134-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 21 17:49:36 crc kubenswrapper[4799]: I0121 17:49:36.254088 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dp7kp"]
Jan 21 17:49:36 crc kubenswrapper[4799]: I0121 17:49:36.262047 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dp7kp"]
Jan 21 17:49:38 crc kubenswrapper[4799]: I0121 17:49:38.214558 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b27cd46-94cb-4007-817a-cdff3f683134" path="/var/lib/kubelet/pods/2b27cd46-94cb-4007-817a-cdff3f683134/volumes"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.619459 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77479b959-cq54q"]
Jan 21 17:49:45 crc kubenswrapper[4799]: E0121 17:49:45.620079 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b27cd46-94cb-4007-817a-cdff3f683134" containerName="registry-server"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.620092 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b27cd46-94cb-4007-817a-cdff3f683134" containerName="registry-server"
Jan 21 17:49:45 crc kubenswrapper[4799]: E0121 17:49:45.620110 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b27cd46-94cb-4007-817a-cdff3f683134" containerName="extract-content"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.620116 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b27cd46-94cb-4007-817a-cdff3f683134" containerName="extract-content"
Jan 21 17:49:45 crc kubenswrapper[4799]: E0121 17:49:45.620152 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b27cd46-94cb-4007-817a-cdff3f683134" containerName="extract-utilities"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.620159 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b27cd46-94cb-4007-817a-cdff3f683134" containerName="extract-utilities"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.620301 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b27cd46-94cb-4007-817a-cdff3f683134" containerName="registry-server"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.621159 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77479b959-cq54q"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.625434 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.625727 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.625927 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.627491 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77479b959-cq54q"]
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.628601 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-fx8sx"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.682510 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b8d888b5-8tgp5"]
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.684025 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b8d888b5-8tgp5"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.689267 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.697524 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b8d888b5-8tgp5"]
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.723331 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p24m\" (UniqueName: \"kubernetes.io/projected/2c69c34b-8e93-4010-8753-7dcd05af8f2c-kube-api-access-5p24m\") pod \"dnsmasq-dns-8b8d888b5-8tgp5\" (UID: \"2c69c34b-8e93-4010-8753-7dcd05af8f2c\") " pod="openstack/dnsmasq-dns-8b8d888b5-8tgp5"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.723445 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c69c34b-8e93-4010-8753-7dcd05af8f2c-dns-svc\") pod \"dnsmasq-dns-8b8d888b5-8tgp5\" (UID: \"2c69c34b-8e93-4010-8753-7dcd05af8f2c\") " pod="openstack/dnsmasq-dns-8b8d888b5-8tgp5"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.723562 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c69c34b-8e93-4010-8753-7dcd05af8f2c-config\") pod \"dnsmasq-dns-8b8d888b5-8tgp5\" (UID: \"2c69c34b-8e93-4010-8753-7dcd05af8f2c\") " pod="openstack/dnsmasq-dns-8b8d888b5-8tgp5"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.723677 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv4q7\" (UniqueName: \"kubernetes.io/projected/55bfc7c3-0320-4612-90f9-0e30f960b573-kube-api-access-vv4q7\") pod \"dnsmasq-dns-77479b959-cq54q\" (UID: \"55bfc7c3-0320-4612-90f9-0e30f960b573\") " pod="openstack/dnsmasq-dns-77479b959-cq54q"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.723865 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55bfc7c3-0320-4612-90f9-0e30f960b573-config\") pod \"dnsmasq-dns-77479b959-cq54q\" (UID: \"55bfc7c3-0320-4612-90f9-0e30f960b573\") " pod="openstack/dnsmasq-dns-77479b959-cq54q"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.825438 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55bfc7c3-0320-4612-90f9-0e30f960b573-config\") pod \"dnsmasq-dns-77479b959-cq54q\" (UID: \"55bfc7c3-0320-4612-90f9-0e30f960b573\") " pod="openstack/dnsmasq-dns-77479b959-cq54q"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.826197 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p24m\" (UniqueName: \"kubernetes.io/projected/2c69c34b-8e93-4010-8753-7dcd05af8f2c-kube-api-access-5p24m\") pod \"dnsmasq-dns-8b8d888b5-8tgp5\" (UID: \"2c69c34b-8e93-4010-8753-7dcd05af8f2c\") " pod="openstack/dnsmasq-dns-8b8d888b5-8tgp5"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.826687 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55bfc7c3-0320-4612-90f9-0e30f960b573-config\") pod \"dnsmasq-dns-77479b959-cq54q\" (UID: \"55bfc7c3-0320-4612-90f9-0e30f960b573\") " pod="openstack/dnsmasq-dns-77479b959-cq54q"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.826939 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c69c34b-8e93-4010-8753-7dcd05af8f2c-dns-svc\") pod \"dnsmasq-dns-8b8d888b5-8tgp5\" (UID: \"2c69c34b-8e93-4010-8753-7dcd05af8f2c\") " pod="openstack/dnsmasq-dns-8b8d888b5-8tgp5"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.827892 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c69c34b-8e93-4010-8753-7dcd05af8f2c-dns-svc\") pod \"dnsmasq-dns-8b8d888b5-8tgp5\" (UID: \"2c69c34b-8e93-4010-8753-7dcd05af8f2c\") " pod="openstack/dnsmasq-dns-8b8d888b5-8tgp5"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.827971 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c69c34b-8e93-4010-8753-7dcd05af8f2c-config\") pod \"dnsmasq-dns-8b8d888b5-8tgp5\" (UID: \"2c69c34b-8e93-4010-8753-7dcd05af8f2c\") " pod="openstack/dnsmasq-dns-8b8d888b5-8tgp5"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.828614 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c69c34b-8e93-4010-8753-7dcd05af8f2c-config\") pod \"dnsmasq-dns-8b8d888b5-8tgp5\" (UID: \"2c69c34b-8e93-4010-8753-7dcd05af8f2c\") " pod="openstack/dnsmasq-dns-8b8d888b5-8tgp5"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.828705 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv4q7\" (UniqueName: \"kubernetes.io/projected/55bfc7c3-0320-4612-90f9-0e30f960b573-kube-api-access-vv4q7\") pod \"dnsmasq-dns-77479b959-cq54q\" (UID: \"55bfc7c3-0320-4612-90f9-0e30f960b573\") " pod="openstack/dnsmasq-dns-77479b959-cq54q"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.850609 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vv4q7\" (UniqueName: \"kubernetes.io/projected/55bfc7c3-0320-4612-90f9-0e30f960b573-kube-api-access-vv4q7\") pod \"dnsmasq-dns-77479b959-cq54q\" (UID: \"55bfc7c3-0320-4612-90f9-0e30f960b573\") " pod="openstack/dnsmasq-dns-77479b959-cq54q"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.852116 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p24m\" (UniqueName: \"kubernetes.io/projected/2c69c34b-8e93-4010-8753-7dcd05af8f2c-kube-api-access-5p24m\") pod \"dnsmasq-dns-8b8d888b5-8tgp5\" (UID: \"2c69c34b-8e93-4010-8753-7dcd05af8f2c\") " pod="openstack/dnsmasq-dns-8b8d888b5-8tgp5"
Jan 21 17:49:45 crc kubenswrapper[4799]: I0121 17:49:45.943786 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77479b959-cq54q"
Jan 21 17:49:46 crc kubenswrapper[4799]: I0121 17:49:46.005851 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b8d888b5-8tgp5"
Jan 21 17:49:46 crc kubenswrapper[4799]: I0121 17:49:46.429845 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77479b959-cq54q"]
Jan 21 17:49:46 crc kubenswrapper[4799]: I0121 17:49:46.505706 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b8d888b5-8tgp5"]
Jan 21 17:49:47 crc kubenswrapper[4799]: I0121 17:49:47.012696 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8d888b5-8tgp5" event={"ID":"2c69c34b-8e93-4010-8753-7dcd05af8f2c","Type":"ContainerStarted","Data":"4ed8a38c3cc28d46ee0d70f84f2621f2e2154a9da16549a4e74dc76ddfc9843d"}
Jan 21 17:49:47 crc kubenswrapper[4799]: I0121 17:49:47.017892 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77479b959-cq54q" event={"ID":"55bfc7c3-0320-4612-90f9-0e30f960b573","Type":"ContainerStarted","Data":"0920294bcdd33c2ef2a5bfa27d232b12471227d8f8d0021ce7652a2c837941b5"}
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.340166 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77479b959-cq54q"]
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.364312 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-546bf79c69-8qlv6"]
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.365847 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-546bf79c69-8qlv6"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.374719 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-546bf79c69-8qlv6"]
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.512017 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e36cf457-3cc4-42c9-b5d8-46116a53f677-config\") pod \"dnsmasq-dns-546bf79c69-8qlv6\" (UID: \"e36cf457-3cc4-42c9-b5d8-46116a53f677\") " pod="openstack/dnsmasq-dns-546bf79c69-8qlv6"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.512372 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e36cf457-3cc4-42c9-b5d8-46116a53f677-dns-svc\") pod \"dnsmasq-dns-546bf79c69-8qlv6\" (UID: \"e36cf457-3cc4-42c9-b5d8-46116a53f677\") " pod="openstack/dnsmasq-dns-546bf79c69-8qlv6"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.512421 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qr5zl\" (UniqueName: \"kubernetes.io/projected/e36cf457-3cc4-42c9-b5d8-46116a53f677-kube-api-access-qr5zl\") pod \"dnsmasq-dns-546bf79c69-8qlv6\" (UID: \"e36cf457-3cc4-42c9-b5d8-46116a53f677\") " pod="openstack/dnsmasq-dns-546bf79c69-8qlv6"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.614546 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qr5zl\" (UniqueName: \"kubernetes.io/projected/e36cf457-3cc4-42c9-b5d8-46116a53f677-kube-api-access-qr5zl\") pod \"dnsmasq-dns-546bf79c69-8qlv6\" (UID: \"e36cf457-3cc4-42c9-b5d8-46116a53f677\") " pod="openstack/dnsmasq-dns-546bf79c69-8qlv6"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.614649 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e36cf457-3cc4-42c9-b5d8-46116a53f677-config\") pod \"dnsmasq-dns-546bf79c69-8qlv6\" (UID: \"e36cf457-3cc4-42c9-b5d8-46116a53f677\") " pod="openstack/dnsmasq-dns-546bf79c69-8qlv6"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.614711 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e36cf457-3cc4-42c9-b5d8-46116a53f677-dns-svc\") pod \"dnsmasq-dns-546bf79c69-8qlv6\" (UID: \"e36cf457-3cc4-42c9-b5d8-46116a53f677\") " pod="openstack/dnsmasq-dns-546bf79c69-8qlv6"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.615524 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e36cf457-3cc4-42c9-b5d8-46116a53f677-dns-svc\") pod \"dnsmasq-dns-546bf79c69-8qlv6\" (UID: \"e36cf457-3cc4-42c9-b5d8-46116a53f677\") " pod="openstack/dnsmasq-dns-546bf79c69-8qlv6"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.616370 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e36cf457-3cc4-42c9-b5d8-46116a53f677-config\") pod \"dnsmasq-dns-546bf79c69-8qlv6\" (UID: \"e36cf457-3cc4-42c9-b5d8-46116a53f677\") " pod="openstack/dnsmasq-dns-546bf79c69-8qlv6"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.643006 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qr5zl\" (UniqueName: \"kubernetes.io/projected/e36cf457-3cc4-42c9-b5d8-46116a53f677-kube-api-access-qr5zl\") pod \"dnsmasq-dns-546bf79c69-8qlv6\" (UID: \"e36cf457-3cc4-42c9-b5d8-46116a53f677\") " pod="openstack/dnsmasq-dns-546bf79c69-8qlv6"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.684388 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b8d888b5-8tgp5"]
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.694427 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-546bf79c69-8qlv6"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.709044 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-99796b587-4m4v4"]
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.721794 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-99796b587-4m4v4"]
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.721929 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-99796b587-4m4v4"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.820071 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-dns-svc\") pod \"dnsmasq-dns-99796b587-4m4v4\" (UID: \"5cd788d2-0579-43c0-a7ef-a22ff27d8e13\") " pod="openstack/dnsmasq-dns-99796b587-4m4v4"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.820146 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-config\") pod \"dnsmasq-dns-99796b587-4m4v4\" (UID: \"5cd788d2-0579-43c0-a7ef-a22ff27d8e13\") " pod="openstack/dnsmasq-dns-99796b587-4m4v4"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.820174 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95dz2\" (UniqueName: \"kubernetes.io/projected/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-kube-api-access-95dz2\") pod \"dnsmasq-dns-99796b587-4m4v4\" (UID: \"5cd788d2-0579-43c0-a7ef-a22ff27d8e13\") " pod="openstack/dnsmasq-dns-99796b587-4m4v4"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.922747 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-dns-svc\") pod \"dnsmasq-dns-99796b587-4m4v4\" (UID: \"5cd788d2-0579-43c0-a7ef-a22ff27d8e13\") " pod="openstack/dnsmasq-dns-99796b587-4m4v4"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.922978 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-config\") pod \"dnsmasq-dns-99796b587-4m4v4\" (UID: \"5cd788d2-0579-43c0-a7ef-a22ff27d8e13\") " pod="openstack/dnsmasq-dns-99796b587-4m4v4"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.923102 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95dz2\" (UniqueName: \"kubernetes.io/projected/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-kube-api-access-95dz2\") pod \"dnsmasq-dns-99796b587-4m4v4\" (UID: \"5cd788d2-0579-43c0-a7ef-a22ff27d8e13\") " pod="openstack/dnsmasq-dns-99796b587-4m4v4"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.924157 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-config\") pod \"dnsmasq-dns-99796b587-4m4v4\" (UID: \"5cd788d2-0579-43c0-a7ef-a22ff27d8e13\") " pod="openstack/dnsmasq-dns-99796b587-4m4v4"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.924588 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-dns-svc\") pod \"dnsmasq-dns-99796b587-4m4v4\" (UID: \"5cd788d2-0579-43c0-a7ef-a22ff27d8e13\") " pod="openstack/dnsmasq-dns-99796b587-4m4v4"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.946406 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95dz2\" (UniqueName: \"kubernetes.io/projected/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-kube-api-access-95dz2\") pod \"dnsmasq-dns-99796b587-4m4v4\" (UID: \"5cd788d2-0579-43c0-a7ef-a22ff27d8e13\") " pod="openstack/dnsmasq-dns-99796b587-4m4v4"
Jan 21 17:49:49 crc kubenswrapper[4799]: I0121 17:49:49.996522 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-546bf79c69-8qlv6"]
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.016384 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6749c445df-26v2l"]
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.029932 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6749c445df-26v2l"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.034450 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6749c445df-26v2l"]
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.043770 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-99796b587-4m4v4"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.135956 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/444bcff7-378c-48a7-8f97-d637df27d7e1-config\") pod \"dnsmasq-dns-6749c445df-26v2l\" (UID: \"444bcff7-378c-48a7-8f97-d637df27d7e1\") " pod="openstack/dnsmasq-dns-6749c445df-26v2l"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.136050 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/444bcff7-378c-48a7-8f97-d637df27d7e1-dns-svc\") pod \"dnsmasq-dns-6749c445df-26v2l\" (UID: \"444bcff7-378c-48a7-8f97-d637df27d7e1\") " pod="openstack/dnsmasq-dns-6749c445df-26v2l"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.136107 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckhhd\" (UniqueName: \"kubernetes.io/projected/444bcff7-378c-48a7-8f97-d637df27d7e1-kube-api-access-ckhhd\") pod \"dnsmasq-dns-6749c445df-26v2l\" (UID: \"444bcff7-378c-48a7-8f97-d637df27d7e1\") " pod="openstack/dnsmasq-dns-6749c445df-26v2l"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.237554 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckhhd\" (UniqueName: \"kubernetes.io/projected/444bcff7-378c-48a7-8f97-d637df27d7e1-kube-api-access-ckhhd\") pod \"dnsmasq-dns-6749c445df-26v2l\" (UID: \"444bcff7-378c-48a7-8f97-d637df27d7e1\") " pod="openstack/dnsmasq-dns-6749c445df-26v2l"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.237640 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/444bcff7-378c-48a7-8f97-d637df27d7e1-config\") pod \"dnsmasq-dns-6749c445df-26v2l\" (UID: \"444bcff7-378c-48a7-8f97-d637df27d7e1\") " pod="openstack/dnsmasq-dns-6749c445df-26v2l"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.237684 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/444bcff7-378c-48a7-8f97-d637df27d7e1-dns-svc\") pod \"dnsmasq-dns-6749c445df-26v2l\" (UID: \"444bcff7-378c-48a7-8f97-d637df27d7e1\") " pod="openstack/dnsmasq-dns-6749c445df-26v2l"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.238611 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/444bcff7-378c-48a7-8f97-d637df27d7e1-dns-svc\") pod \"dnsmasq-dns-6749c445df-26v2l\" (UID: \"444bcff7-378c-48a7-8f97-d637df27d7e1\") " pod="openstack/dnsmasq-dns-6749c445df-26v2l"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.238638 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/444bcff7-378c-48a7-8f97-d637df27d7e1-config\") pod \"dnsmasq-dns-6749c445df-26v2l\" (UID: \"444bcff7-378c-48a7-8f97-d637df27d7e1\") " pod="openstack/dnsmasq-dns-6749c445df-26v2l"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.261515 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckhhd\" (UniqueName: \"kubernetes.io/projected/444bcff7-378c-48a7-8f97-d637df27d7e1-kube-api-access-ckhhd\") pod \"dnsmasq-dns-6749c445df-26v2l\" (UID: \"444bcff7-378c-48a7-8f97-d637df27d7e1\") " pod="openstack/dnsmasq-dns-6749c445df-26v2l"
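[editor's note] The dnsmasq entries above show the volume reconciler's fixed per-pod sequence: after "SyncLoop ADD", every volume goes VerifyControllerAttachedVolume, then "MountVolume started", then "MountVolume.SetUp succeeded", and only then is the pod sandbox created. A throwaway sketch for pulling that sequence out of lines like these; this is a hypothetical helper (not part of kubelet or this CI job) that parses the structured key="value" pairs klog emits:

	package main

	import (
		"bufio"
		"fmt"
		"os"
		"regexp"
	)

	var (
		// Structured key="value" pairs appended after the log message.
		kvRe = regexp.MustCompile(`(\w+)="((?:[^"\\]|\\.)*)"`)
		// The quoted message that follows the "file.go:NNN]" location prefix.
		msgRe = regexp.MustCompile(`\.go:\d+\] "((?:[^"\\]|\\.)*)"`)
	)

	func main() {
		sc := bufio.NewScanner(os.Stdin)
		sc.Buffer(make([]byte, 1<<20), 1<<20) // klog lines can be very long
		for sc.Scan() {
			line := sc.Text()
			m := msgRe.FindStringSubmatch(line)
			if m == nil {
				continue // not a structured klog entry
			}
			fields := map[string]string{}
			for _, kv := range kvRe.FindAllStringSubmatch(line, -1) {
				fields[kv[1]] = kv[2]
			}
			if pod := fields["pod"]; pod != "" {
				fmt.Printf("%s\t%s\n", pod, m[1])
			}
		}
	}

Fed this log on stdin (e.g. "zcat kubelet.log.gz | go run trace.go", with trace.go a name chosen here for illustration), it prints one pod-tagged event per line, which makes the ADD/UPDATE/DELETE churn of the short-lived dnsmasq-dns ReplicaSets easy to follow.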
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.383285 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-546bf79c69-8qlv6"]
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.387541 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6749c445df-26v2l"
Jan 21 17:49:50 crc kubenswrapper[4799]: W0121 17:49:50.441591 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode36cf457_3cc4_42c9_b5d8_46116a53f677.slice/crio-c3bbf7536eb40601166385ce56f94d85a684191db359fcae3507abf9b1957da0 WatchSource:0}: Error finding container c3bbf7536eb40601166385ce56f94d85a684191db359fcae3507abf9b1957da0: Status 404 returned error can't find the container with id c3bbf7536eb40601166385ce56f94d85a684191db359fcae3507abf9b1957da0
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.535231 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.536476 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.540424 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.540424 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.540634 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.552999 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.554532 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-nwh8p"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.554826 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.554974 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.559349 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.675501 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.675553 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.675963 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.676164 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-config-data\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.676205 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.676448 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rckht\" (UniqueName: \"kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-kube-api-access-rckht\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.676504 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/48f0f966-0779-4959-884e-eae4ed66e969-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.676640 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/48f0f966-0779-4959-884e-eae4ed66e969-pod-info\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.676731 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.676847 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.676966 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-server-conf\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.734382 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-99796b587-4m4v4"]
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.779474 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.779531 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-server-conf\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.779577 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.779595 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.779619 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.779643 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-config-data\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.779681 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.779743 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rckht\" (UniqueName: \"kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-kube-api-access-rckht\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.779772 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/48f0f966-0779-4959-884e-eae4ed66e969-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.779799 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/48f0f966-0779-4959-884e-eae4ed66e969-pod-info\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.779822 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.780320 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.781518 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-config-data\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.781937 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.783217 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-server-conf\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.787549 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.787638 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/48f0f966-0779-4959-884e-eae4ed66e969-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.788049 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.794658 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.794835 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.797579 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rckht\" (UniqueName: \"kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-kube-api-access-rckht\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.811919 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/48f0f966-0779-4959-884e-eae4ed66e969-pod-info\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.832739 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.834641 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.843300 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.843560 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.844318 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.844341 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.844465 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.844717 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-c67js"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.844763 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.854846 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.875693 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6749c445df-26v2l"]
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.881979 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.882034 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/03a5694f-1e8b-490e-be8f-dce31bdd83c3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.882063 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/03a5694f-1e8b-490e-be8f-dce31bdd83c3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.882100 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.882157 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.882205 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46nw9\" (UniqueName: \"kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-kube-api-access-46nw9\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.882247 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.882267 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.882305 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.882328 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.882348 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.883049 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.935360 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.984491 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.984544 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.984591 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.984635 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.984666 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.984699 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.984723 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/03a5694f-1e8b-490e-be8f-dce31bdd83c3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.984779 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/03a5694f-1e8b-490e-be8f-dce31bdd83c3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.984801 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.984836 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.984873 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46nw9\" (UniqueName: \"kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-kube-api-access-46nw9\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.985100 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.985578 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.986462 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.986607 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.987183 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.987756 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.990181 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/03a5694f-1e8b-490e-be8f-dce31bdd83c3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.990220 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:50 crc kubenswrapper[4799]: I0121 17:49:50.993400 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/03a5694f-1e8b-490e-be8f-dce31bdd83c3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.008854 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.013074 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46nw9\" (UniqueName: \"kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-kube-api-access-46nw9\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.022857 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.113407 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6749c445df-26v2l" event={"ID":"444bcff7-378c-48a7-8f97-d637df27d7e1","Type":"ContainerStarted","Data":"8e8977c59d51653ec0eb19843038313aa1a6a74a8f5ae332cb7365f42154fccd"}
Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.115917 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-99796b587-4m4v4" event={"ID":"5cd788d2-0579-43c0-a7ef-a22ff27d8e13","Type":"ContainerStarted","Data":"209a4a41c8313172d1978b44078741105600902b6f0737aa4e1515e21da57f4e"}
Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.120840 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-546bf79c69-8qlv6" event={"ID":"e36cf457-3cc4-42c9-b5d8-46116a53f677","Type":"ContainerStarted","Data":"c3bbf7536eb40601166385ce56f94d85a684191db359fcae3507abf9b1957da0"}
Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.184644 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-notifications-server-0"]
Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.187481 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-notifications-server-0"
Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.193737 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-notifications-server-0"]
Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.215812 4799 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.247087 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-default-user" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.248028 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-erlang-cookie" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.248200 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-config-data" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.248464 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-notifications-svc" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.248596 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-server-conf" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.248733 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-server-dockercfg-t6vwm" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.248918 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-plugins-conf" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.356852 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.392565 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/63677f61-4283-417a-bcf7-303840452589-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.392642 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/63677f61-4283-417a-bcf7-303840452589-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.392775 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/63677f61-4283-417a-bcf7-303840452589-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.392816 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/63677f61-4283-417a-bcf7-303840452589-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.392846 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74mfd\" (UniqueName: \"kubernetes.io/projected/63677f61-4283-417a-bcf7-303840452589-kube-api-access-74mfd\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " 
pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.392867 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.392912 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/63677f61-4283-417a-bcf7-303840452589-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.392951 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/63677f61-4283-417a-bcf7-303840452589-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.393010 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/63677f61-4283-417a-bcf7-303840452589-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.393061 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/63677f61-4283-417a-bcf7-303840452589-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.393307 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/63677f61-4283-417a-bcf7-303840452589-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.495348 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/63677f61-4283-417a-bcf7-303840452589-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.495423 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/63677f61-4283-417a-bcf7-303840452589-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.495453 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/63677f61-4283-417a-bcf7-303840452589-rabbitmq-confd\") pod 
\"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.495487 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74mfd\" (UniqueName: \"kubernetes.io/projected/63677f61-4283-417a-bcf7-303840452589-kube-api-access-74mfd\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.495510 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.495532 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/63677f61-4283-417a-bcf7-303840452589-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.495567 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/63677f61-4283-417a-bcf7-303840452589-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.495636 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/63677f61-4283-417a-bcf7-303840452589-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.495653 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/63677f61-4283-417a-bcf7-303840452589-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.495680 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/63677f61-4283-417a-bcf7-303840452589-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.495796 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/63677f61-4283-417a-bcf7-303840452589-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.496521 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod 
\"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.500708 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/63677f61-4283-417a-bcf7-303840452589-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.514553 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/63677f61-4283-417a-bcf7-303840452589-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.514885 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/63677f61-4283-417a-bcf7-303840452589-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.518189 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/63677f61-4283-417a-bcf7-303840452589-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.519421 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/63677f61-4283-417a-bcf7-303840452589-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.523030 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/63677f61-4283-417a-bcf7-303840452589-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.524574 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/63677f61-4283-417a-bcf7-303840452589-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.524630 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/63677f61-4283-417a-bcf7-303840452589-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.524868 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/63677f61-4283-417a-bcf7-303840452589-erlang-cookie-secret\") pod 
\"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.567805 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74mfd\" (UniqueName: \"kubernetes.io/projected/63677f61-4283-417a-bcf7-303840452589-kube-api-access-74mfd\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.588261 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"63677f61-4283-417a-bcf7-303840452589\") " pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.846528 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 21 17:49:51 crc kubenswrapper[4799]: I0121 17:49:51.868367 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.140952 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"48f0f966-0779-4959-884e-eae4ed66e969","Type":"ContainerStarted","Data":"766d0aa0cbba878c9cf0f4bfc16780f81630549cfa0224a60ee50b57f0b2f61f"} Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.659689 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.661385 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.665053 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.665279 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.665828 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.666112 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-nqbxj" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.694373 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.715865 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.836035 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snwbg\" (UniqueName: \"kubernetes.io/projected/04f9c729-36bb-4aa5-9060-af5b0666b196-kube-api-access-snwbg\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.836388 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.837240 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/04f9c729-36bb-4aa5-9060-af5b0666b196-config-data-default\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.840621 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/04f9c729-36bb-4aa5-9060-af5b0666b196-kolla-config\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.840681 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/04f9c729-36bb-4aa5-9060-af5b0666b196-config-data-generated\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.842726 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04f9c729-36bb-4aa5-9060-af5b0666b196-operator-scripts\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.842770 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/04f9c729-36bb-4aa5-9060-af5b0666b196-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.842790 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04f9c729-36bb-4aa5-9060-af5b0666b196-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.945097 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/04f9c729-36bb-4aa5-9060-af5b0666b196-config-data-default\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.945266 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/04f9c729-36bb-4aa5-9060-af5b0666b196-kolla-config\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.945290 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/04f9c729-36bb-4aa5-9060-af5b0666b196-config-data-generated\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.945340 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04f9c729-36bb-4aa5-9060-af5b0666b196-operator-scripts\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.945361 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/04f9c729-36bb-4aa5-9060-af5b0666b196-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.945377 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04f9c729-36bb-4aa5-9060-af5b0666b196-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.945403 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snwbg\" (UniqueName: \"kubernetes.io/projected/04f9c729-36bb-4aa5-9060-af5b0666b196-kube-api-access-snwbg\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.945430 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: 
\"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.945744 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.947159 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/04f9c729-36bb-4aa5-9060-af5b0666b196-kolla-config\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.947332 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/04f9c729-36bb-4aa5-9060-af5b0666b196-config-data-default\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.947362 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/04f9c729-36bb-4aa5-9060-af5b0666b196-config-data-generated\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.948765 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04f9c729-36bb-4aa5-9060-af5b0666b196-operator-scripts\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.953783 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/04f9c729-36bb-4aa5-9060-af5b0666b196-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.966296 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04f9c729-36bb-4aa5-9060-af5b0666b196-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.976514 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snwbg\" (UniqueName: \"kubernetes.io/projected/04f9c729-36bb-4aa5-9060-af5b0666b196-kube-api-access-snwbg\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:52 crc kubenswrapper[4799]: I0121 17:49:52.985274 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"04f9c729-36bb-4aa5-9060-af5b0666b196\") " pod="openstack/openstack-galera-0" Jan 21 17:49:53 crc kubenswrapper[4799]: I0121 17:49:53.003637 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Jan 21 17:49:53 crc kubenswrapper[4799]: I0121 17:49:53.996699 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.010050 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.012799 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-gtw9t" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.013089 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.013306 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.013609 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.039584 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.171389 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.171451 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.171808 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.171900 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.171975 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.172399 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-operator-scripts\") pod 
\"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.172464 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.172496 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5gjl\" (UniqueName: \"kubernetes.io/projected/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-kube-api-access-b5gjl\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.241808 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.243527 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.248595 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.248840 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.249470 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-lwqzx" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.263048 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.275472 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.275527 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.275551 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.275660 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.275696 4799 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.275734 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5gjl\" (UniqueName: \"kubernetes.io/projected/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-kube-api-access-b5gjl\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.275772 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.275794 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.277363 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.285469 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.286566 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.286968 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.319472 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.320010 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.321036 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.323610 
4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5gjl\" (UniqueName: \"kubernetes.io/projected/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-kube-api-access-b5gjl\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.324115 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.324956 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.325894 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e95ea0b2-ade1-4aaa-ad67-b85ebde84afa-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.348747 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa\") " pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.378879 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c75e85a7-0869-4fe4-ba35-d51f6107027c-config-data\") pod \"memcached-0\" (UID: \"c75e85a7-0869-4fe4-ba35-d51f6107027c\") " pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.379456 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c75e85a7-0869-4fe4-ba35-d51f6107027c-kolla-config\") pod \"memcached-0\" (UID: \"c75e85a7-0869-4fe4-ba35-d51f6107027c\") " pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.379517 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wq6tc\" (UniqueName: \"kubernetes.io/projected/c75e85a7-0869-4fe4-ba35-d51f6107027c-kube-api-access-wq6tc\") pod \"memcached-0\" (UID: \"c75e85a7-0869-4fe4-ba35-d51f6107027c\") " pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.379542 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c75e85a7-0869-4fe4-ba35-d51f6107027c-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c75e85a7-0869-4fe4-ba35-d51f6107027c\") " pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.379852 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/c75e85a7-0869-4fe4-ba35-d51f6107027c-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c75e85a7-0869-4fe4-ba35-d51f6107027c\") " pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.482446 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c75e85a7-0869-4fe4-ba35-d51f6107027c-kolla-config\") pod \"memcached-0\" (UID: \"c75e85a7-0869-4fe4-ba35-d51f6107027c\") " pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.482547 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wq6tc\" (UniqueName: \"kubernetes.io/projected/c75e85a7-0869-4fe4-ba35-d51f6107027c-kube-api-access-wq6tc\") pod \"memcached-0\" (UID: \"c75e85a7-0869-4fe4-ba35-d51f6107027c\") " pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.482582 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c75e85a7-0869-4fe4-ba35-d51f6107027c-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c75e85a7-0869-4fe4-ba35-d51f6107027c\") " pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.482636 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c75e85a7-0869-4fe4-ba35-d51f6107027c-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c75e85a7-0869-4fe4-ba35-d51f6107027c\") " pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.482691 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c75e85a7-0869-4fe4-ba35-d51f6107027c-config-data\") pod \"memcached-0\" (UID: \"c75e85a7-0869-4fe4-ba35-d51f6107027c\") " pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.492879 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c75e85a7-0869-4fe4-ba35-d51f6107027c-config-data\") pod \"memcached-0\" (UID: \"c75e85a7-0869-4fe4-ba35-d51f6107027c\") " pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.496383 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c75e85a7-0869-4fe4-ba35-d51f6107027c-kolla-config\") pod \"memcached-0\" (UID: \"c75e85a7-0869-4fe4-ba35-d51f6107027c\") " pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.515922 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wq6tc\" (UniqueName: \"kubernetes.io/projected/c75e85a7-0869-4fe4-ba35-d51f6107027c-kube-api-access-wq6tc\") pod \"memcached-0\" (UID: \"c75e85a7-0869-4fe4-ba35-d51f6107027c\") " pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.515960 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c75e85a7-0869-4fe4-ba35-d51f6107027c-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c75e85a7-0869-4fe4-ba35-d51f6107027c\") " pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.528192 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c75e85a7-0869-4fe4-ba35-d51f6107027c-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c75e85a7-0869-4fe4-ba35-d51f6107027c\") " pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.587664 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.646276 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-gtw9t" Jan 21 17:49:54 crc kubenswrapper[4799]: I0121 17:49:54.654571 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 21 17:49:55 crc kubenswrapper[4799]: I0121 17:49:55.970885 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:49:55 crc kubenswrapper[4799]: I0121 17:49:55.970989 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:49:56 crc kubenswrapper[4799]: I0121 17:49:56.531732 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jan 21 17:49:56 crc kubenswrapper[4799]: I0121 17:49:56.559274 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 21 17:49:56 crc kubenswrapper[4799]: I0121 17:49:56.559477 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 21 17:49:56 crc kubenswrapper[4799]: I0121 17:49:56.563421 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-p4x8x" Jan 21 17:49:56 crc kubenswrapper[4799]: I0121 17:49:56.741161 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj95x\" (UniqueName: \"kubernetes.io/projected/59e07a31-b75b-4e5b-827f-8ce5617a3810-kube-api-access-bj95x\") pod \"kube-state-metrics-0\" (UID: \"59e07a31-b75b-4e5b-827f-8ce5617a3810\") " pod="openstack/kube-state-metrics-0" Jan 21 17:49:56 crc kubenswrapper[4799]: I0121 17:49:56.844121 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj95x\" (UniqueName: \"kubernetes.io/projected/59e07a31-b75b-4e5b-827f-8ce5617a3810-kube-api-access-bj95x\") pod \"kube-state-metrics-0\" (UID: \"59e07a31-b75b-4e5b-827f-8ce5617a3810\") " pod="openstack/kube-state-metrics-0" Jan 21 17:49:56 crc kubenswrapper[4799]: I0121 17:49:56.868437 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj95x\" (UniqueName: \"kubernetes.io/projected/59e07a31-b75b-4e5b-827f-8ce5617a3810-kube-api-access-bj95x\") pod \"kube-state-metrics-0\" (UID: \"59e07a31-b75b-4e5b-827f-8ce5617a3810\") " pod="openstack/kube-state-metrics-0" Jan 21 17:49:56 crc kubenswrapper[4799]: I0121 17:49:56.897858 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 21 17:49:57 crc kubenswrapper[4799]: I0121 17:49:57.913190 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 21 17:49:57 crc kubenswrapper[4799]: I0121 17:49:57.915389 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:57 crc kubenswrapper[4799]: I0121 17:49:57.920358 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Jan 21 17:49:57 crc kubenswrapper[4799]: I0121 17:49:57.920690 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Jan 21 17:49:57 crc kubenswrapper[4799]: I0121 17:49:57.920767 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-8j9xl" Jan 21 17:49:57 crc kubenswrapper[4799]: I0121 17:49:57.920790 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Jan 21 17:49:57 crc kubenswrapper[4799]: I0121 17:49:57.921619 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-2" Jan 21 17:49:57 crc kubenswrapper[4799]: I0121 17:49:57.921745 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-1" Jan 21 17:49:57 crc kubenswrapper[4799]: I0121 17:49:57.922012 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Jan 21 17:49:57 crc kubenswrapper[4799]: I0121 17:49:57.928848 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Jan 21 17:49:57 crc kubenswrapper[4799]: I0121 17:49:57.933979 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.093169 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.093264 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.093336 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.093409 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"tls-assets\" (UniqueName: \"kubernetes.io/projected/c3dbd916-66ac-4f70-a011-68d4195c5c44-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.093465 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.093509 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c3dbd916-66ac-4f70-a011-68d4195c5c44-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.093550 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-config\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.093590 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxx4k\" (UniqueName: \"kubernetes.io/projected/c3dbd916-66ac-4f70-a011-68d4195c5c44-kube-api-access-bxx4k\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.093636 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.093658 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.205381 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c3dbd916-66ac-4f70-a011-68d4195c5c44-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.205489 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-config\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.205556 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.205720 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.205745 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxx4k\" (UniqueName: \"kubernetes.io/projected/c3dbd916-66ac-4f70-a011-68d4195c5c44-kube-api-access-bxx4k\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.206397 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.206773 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.207032 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.207111 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c3dbd916-66ac-4f70-a011-68d4195c5c44-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.207430 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.209385 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-0\") pod 
\"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.216491 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c3dbd916-66ac-4f70-a011-68d4195c5c44-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.218618 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-config\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.219942 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.220673 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.223594 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.223836 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c3dbd916-66ac-4f70-a011-68d4195c5c44-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.229252 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.241200 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxx4k\" (UniqueName: \"kubernetes.io/projected/c3dbd916-66ac-4f70-a011-68d4195c5c44-kube-api-access-bxx4k\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.275595 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.275638 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8cc5b1a9ff3eab274f8795ef882996bdac004679de968d37b59819fb3c1cc7c5/globalmount\"" pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.508375 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") pod \"prometheus-metric-storage-0\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:58 crc kubenswrapper[4799]: I0121 17:49:58.542226 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.648784 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-68wt5"] Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.650175 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.653763 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/05213e52-1f99-42a4-b882-4514760063c7-var-run\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.653816 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/05213e52-1f99-42a4-b882-4514760063c7-var-run-ovn\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.653834 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05213e52-1f99-42a4-b882-4514760063c7-combined-ca-bundle\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.653868 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ww2lh\" (UniqueName: \"kubernetes.io/projected/05213e52-1f99-42a4-b882-4514760063c7-kube-api-access-ww2lh\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.653895 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/05213e52-1f99-42a4-b882-4514760063c7-ovn-controller-tls-certs\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: 
I0121 17:49:59.653964 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05213e52-1f99-42a4-b882-4514760063c7-scripts\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.654013 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/05213e52-1f99-42a4-b882-4514760063c7-var-log-ovn\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.657832 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.658653 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.658956 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-p9472" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.681836 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-68wt5"] Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.699765 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-5dwpd"] Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.703894 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.719647 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-5dwpd"] Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.757807 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/05213e52-1f99-42a4-b882-4514760063c7-var-log-ovn\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.757930 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/05213e52-1f99-42a4-b882-4514760063c7-var-run\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.757963 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/05213e52-1f99-42a4-b882-4514760063c7-var-run-ovn\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.757988 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05213e52-1f99-42a4-b882-4514760063c7-combined-ca-bundle\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.758019 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ww2lh\" 
(UniqueName: \"kubernetes.io/projected/05213e52-1f99-42a4-b882-4514760063c7-kube-api-access-ww2lh\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.758046 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/05213e52-1f99-42a4-b882-4514760063c7-ovn-controller-tls-certs\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.759045 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/05213e52-1f99-42a4-b882-4514760063c7-var-run\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.759141 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/05213e52-1f99-42a4-b882-4514760063c7-var-log-ovn\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.759159 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/05213e52-1f99-42a4-b882-4514760063c7-var-run-ovn\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.759235 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05213e52-1f99-42a4-b882-4514760063c7-scripts\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.761467 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/05213e52-1f99-42a4-b882-4514760063c7-scripts\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.767018 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/05213e52-1f99-42a4-b882-4514760063c7-ovn-controller-tls-certs\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.769924 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05213e52-1f99-42a4-b882-4514760063c7-combined-ca-bundle\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.781670 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ww2lh\" (UniqueName: \"kubernetes.io/projected/05213e52-1f99-42a4-b882-4514760063c7-kube-api-access-ww2lh\") pod \"ovn-controller-68wt5\" (UID: \"05213e52-1f99-42a4-b882-4514760063c7\") " pod="openstack/ovn-controller-68wt5" Jan 21 17:49:59 crc 
kubenswrapper[4799]: I0121 17:49:59.863266 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d199dae-6bd1-48c4-8a95-25ffd4555e29-scripts\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.863320 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/0d199dae-6bd1-48c4-8a95-25ffd4555e29-var-lib\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.863377 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0d199dae-6bd1-48c4-8a95-25ffd4555e29-var-run\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.863458 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/0d199dae-6bd1-48c4-8a95-25ffd4555e29-etc-ovs\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.863534 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btjb9\" (UniqueName: \"kubernetes.io/projected/0d199dae-6bd1-48c4-8a95-25ffd4555e29-kube-api-access-btjb9\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.863583 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/0d199dae-6bd1-48c4-8a95-25ffd4555e29-var-log\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.968451 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btjb9\" (UniqueName: \"kubernetes.io/projected/0d199dae-6bd1-48c4-8a95-25ffd4555e29-kube-api-access-btjb9\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.968526 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/0d199dae-6bd1-48c4-8a95-25ffd4555e29-var-log\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.968597 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d199dae-6bd1-48c4-8a95-25ffd4555e29-scripts\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.968625 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/0d199dae-6bd1-48c4-8a95-25ffd4555e29-var-lib\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.968664 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0d199dae-6bd1-48c4-8a95-25ffd4555e29-var-run\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.968736 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/0d199dae-6bd1-48c4-8a95-25ffd4555e29-etc-ovs\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.968970 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/0d199dae-6bd1-48c4-8a95-25ffd4555e29-var-log\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.969059 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/0d199dae-6bd1-48c4-8a95-25ffd4555e29-etc-ovs\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.969063 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0d199dae-6bd1-48c4-8a95-25ffd4555e29-var-run\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.969216 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/0d199dae-6bd1-48c4-8a95-25ffd4555e29-var-lib\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.971261 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d199dae-6bd1-48c4-8a95-25ffd4555e29-scripts\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:49:59 crc kubenswrapper[4799]: I0121 17:49:59.984635 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-68wt5" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.006488 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btjb9\" (UniqueName: \"kubernetes.io/projected/0d199dae-6bd1-48c4-8a95-25ffd4555e29-kube-api-access-btjb9\") pod \"ovn-controller-ovs-5dwpd\" (UID: \"0d199dae-6bd1-48c4-8a95-25ffd4555e29\") " pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.027354 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.545292 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.547725 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.552528 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.553351 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.553863 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-n546j" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.554211 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.554417 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.563912 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.689300 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58187703-2c52-4f99-8d9a-65306c90c5ed-config\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.690090 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/58187703-2c52-4f99-8d9a-65306c90c5ed-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.690394 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58187703-2c52-4f99-8d9a-65306c90c5ed-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.690580 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/58187703-2c52-4f99-8d9a-65306c90c5ed-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.690712 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ql6f\" (UniqueName: \"kubernetes.io/projected/58187703-2c52-4f99-8d9a-65306c90c5ed-kube-api-access-5ql6f\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.690871 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/58187703-2c52-4f99-8d9a-65306c90c5ed-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.690993 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/58187703-2c52-4f99-8d9a-65306c90c5ed-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.691163 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.793423 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/58187703-2c52-4f99-8d9a-65306c90c5ed-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.793506 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ql6f\" (UniqueName: \"kubernetes.io/projected/58187703-2c52-4f99-8d9a-65306c90c5ed-kube-api-access-5ql6f\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.793584 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/58187703-2c52-4f99-8d9a-65306c90c5ed-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.793625 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/58187703-2c52-4f99-8d9a-65306c90c5ed-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.793707 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.793749 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58187703-2c52-4f99-8d9a-65306c90c5ed-config\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.793773 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/58187703-2c52-4f99-8d9a-65306c90c5ed-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.793790 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58187703-2c52-4f99-8d9a-65306c90c5ed-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.796758 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/58187703-2c52-4f99-8d9a-65306c90c5ed-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.797230 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58187703-2c52-4f99-8d9a-65306c90c5ed-config\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.797578 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.797694 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/58187703-2c52-4f99-8d9a-65306c90c5ed-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.800405 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/58187703-2c52-4f99-8d9a-65306c90c5ed-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.805117 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/58187703-2c52-4f99-8d9a-65306c90c5ed-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.816609 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58187703-2c52-4f99-8d9a-65306c90c5ed-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.838066 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ql6f\" (UniqueName: \"kubernetes.io/projected/58187703-2c52-4f99-8d9a-65306c90c5ed-kube-api-access-5ql6f\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc kubenswrapper[4799]: I0121 17:50:00.843673 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"58187703-2c52-4f99-8d9a-65306c90c5ed\") " pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:00 crc 
kubenswrapper[4799]: I0121 17:50:00.931536 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.738659 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.752994 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.757852 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.774071 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.774344 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.774563 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-cvtdk" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.774807 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.859950 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/acea8227-6d95-4c5f-bba7-8e954701de28-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.860018 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acea8227-6d95-4c5f-bba7-8e954701de28-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.860064 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/acea8227-6d95-4c5f-bba7-8e954701de28-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.860094 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j988t\" (UniqueName: \"kubernetes.io/projected/acea8227-6d95-4c5f-bba7-8e954701de28-kube-api-access-j988t\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.860158 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/acea8227-6d95-4c5f-bba7-8e954701de28-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.860174 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acea8227-6d95-4c5f-bba7-8e954701de28-config\") 
pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.860217 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acea8227-6d95-4c5f-bba7-8e954701de28-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.860254 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.962409 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acea8227-6d95-4c5f-bba7-8e954701de28-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.962485 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.962524 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/acea8227-6d95-4c5f-bba7-8e954701de28-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.962541 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acea8227-6d95-4c5f-bba7-8e954701de28-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.962577 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/acea8227-6d95-4c5f-bba7-8e954701de28-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.962596 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j988t\" (UniqueName: \"kubernetes.io/projected/acea8227-6d95-4c5f-bba7-8e954701de28-kube-api-access-j988t\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.962644 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/acea8227-6d95-4c5f-bba7-8e954701de28-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.962660 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/acea8227-6d95-4c5f-bba7-8e954701de28-config\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.963541 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.964538 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/acea8227-6d95-4c5f-bba7-8e954701de28-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.963552 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acea8227-6d95-4c5f-bba7-8e954701de28-config\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.964952 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acea8227-6d95-4c5f-bba7-8e954701de28-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.982562 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acea8227-6d95-4c5f-bba7-8e954701de28-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:03 crc kubenswrapper[4799]: I0121 17:50:03.993803 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/acea8227-6d95-4c5f-bba7-8e954701de28-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:04 crc kubenswrapper[4799]: I0121 17:50:04.000899 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/acea8227-6d95-4c5f-bba7-8e954701de28-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:04 crc kubenswrapper[4799]: I0121 17:50:04.018150 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j988t\" (UniqueName: \"kubernetes.io/projected/acea8227-6d95-4c5f-bba7-8e954701de28-kube-api-access-j988t\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:04 crc kubenswrapper[4799]: I0121 17:50:04.034405 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"acea8227-6d95-4c5f-bba7-8e954701de28\") " pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:04 crc kubenswrapper[4799]: I0121 17:50:04.090403 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:11 crc kubenswrapper[4799]: I0121 17:50:11.355998 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"03a5694f-1e8b-490e-be8f-dce31bdd83c3","Type":"ContainerStarted","Data":"a3a5c63d36782c1c68d50cb46db514f4b49f6ea52b039d826b2e8359f2c1d7d7"} Jan 21 17:50:21 crc kubenswrapper[4799]: E0121 17:50:21.036299 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest" Jan 21 17:50:21 crc kubenswrapper[4799]: E0121 17:50:21.036735 4799 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest" Jan 21 17:50:21 crc kubenswrapper[4799]: E0121 17:50:21.036992 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:38.102.83.30:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rckht,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(48f0f966-0779-4959-884e-eae4ed66e969): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 17:50:21 crc kubenswrapper[4799]: E0121 17:50:21.038494 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="48f0f966-0779-4959-884e-eae4ed66e969" Jan 21 17:50:21 crc kubenswrapper[4799]: I0121 17:50:21.432721 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 21 17:50:21 crc kubenswrapper[4799]: E0121 17:50:21.454081 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.30:5001/podified-master-centos10/openstack-rabbitmq:watcher_latest\\\"\"" pod="openstack/rabbitmq-server-0" podUID="48f0f966-0779-4959-884e-eae4ed66e969" Jan 21 17:50:21 crc kubenswrapper[4799]: I0121 17:50:21.625557 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 21 17:50:22 crc kubenswrapper[4799]: W0121 17:50:22.080321 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode95ea0b2_ade1_4aaa_ad67_b85ebde84afa.slice/crio-6cbeecf6fe8a5c8ac094c964d1d04facf1ee941d2463e64ae72a078570469786 WatchSource:0}: Error finding container 6cbeecf6fe8a5c8ac094c964d1d04facf1ee941d2463e64ae72a078570469786: Status 404 returned error can't find the container with id 6cbeecf6fe8a5c8ac094c964d1d04facf1ee941d2463e64ae72a078570469786 
Jan 21 17:50:22 crc kubenswrapper[4799]: W0121 17:50:22.081253 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc75e85a7_0869_4fe4_ba35_d51f6107027c.slice/crio-8c273364d06a5c59f67181c666ba5a442816c8229f33a79fdab29a1c60283762 WatchSource:0}: Error finding container 8c273364d06a5c59f67181c666ba5a442816c8229f33a79fdab29a1c60283762: Status 404 returned error can't find the container with id 8c273364d06a5c59f67181c666ba5a442816c8229f33a79fdab29a1c60283762 Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.118353 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.118427 4799 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.118674 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qr5zl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-546bf79c69-8qlv6_openstack(e36cf457-3cc4-42c9-b5d8-46116a53f677): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 17:50:22 crc 
kubenswrapper[4799]: E0121 17:50:22.127283 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-546bf79c69-8qlv6" podUID="e36cf457-3cc4-42c9-b5d8-46116a53f677" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.141395 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.141464 4799 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.141605 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-95dz2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-99796b587-4m4v4_openstack(5cd788d2-0579-43c0-a7ef-a22ff27d8e13): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.142809 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context 
canceled\"" pod="openstack/dnsmasq-dns-99796b587-4m4v4" podUID="5cd788d2-0579-43c0-a7ef-a22ff27d8e13" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.143920 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.147676 4799 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.147830 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5p24m,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-8b8d888b5-8tgp5_openstack(2c69c34b-8e93-4010-8753-7dcd05af8f2c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.150721 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-8b8d888b5-8tgp5" podUID="2c69c34b-8e93-4010-8753-7dcd05af8f2c" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.152818 4799 log.go:32] "PullImage from image service failed" 
err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.152852 4799 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.152977 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vv4q7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-77479b959-cq54q_openstack(55bfc7c3-0320-4612-90f9-0e30f960b573): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.154186 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-77479b959-cq54q" podUID="55bfc7c3-0320-4612-90f9-0e30f960b573" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.211805 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.211867 4799 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.212026 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5c7h56dh5cfh8bh54fhbbhf4h5b9hdch67fhd7h55fh55fh6ch9h548h54ch665h647h6h8fhd6h5dfh5cdh58bh577h66fh695h5fbh55h77h5fcq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ckhhd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-6749c445df-26v2l_openstack(444bcff7-378c-48a7-8f97-d637df27d7e1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.213264 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-6749c445df-26v2l" podUID="444bcff7-378c-48a7-8f97-d637df27d7e1" Jan 21 17:50:22 crc kubenswrapper[4799]: I0121 17:50:22.463745 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c75e85a7-0869-4fe4-ba35-d51f6107027c","Type":"ContainerStarted","Data":"8c273364d06a5c59f67181c666ba5a442816c8229f33a79fdab29a1c60283762"} Jan 21 17:50:22 crc kubenswrapper[4799]: I0121 17:50:22.467422 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa","Type":"ContainerStarted","Data":"6cbeecf6fe8a5c8ac094c964d1d04facf1ee941d2463e64ae72a078570469786"} Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.470205 4799 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest\\\"\"" pod="openstack/dnsmasq-dns-99796b587-4m4v4" podUID="5cd788d2-0579-43c0-a7ef-a22ff27d8e13" Jan 21 17:50:22 crc kubenswrapper[4799]: E0121 17:50:22.471889 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest\\\"\"" pod="openstack/dnsmasq-dns-6749c445df-26v2l" podUID="444bcff7-378c-48a7-8f97-d637df27d7e1" Jan 21 17:50:22 crc kubenswrapper[4799]: I0121 17:50:22.911528 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-68wt5"] Jan 21 17:50:23 crc kubenswrapper[4799]: I0121 17:50:23.240085 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 21 17:50:23 crc kubenswrapper[4799]: I0121 17:50:23.275795 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 21 17:50:23 crc kubenswrapper[4799]: I0121 17:50:23.307713 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-notifications-server-0"] Jan 21 17:50:23 crc kubenswrapper[4799]: I0121 17:50:23.338633 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-5dwpd"] Jan 21 17:50:23 crc kubenswrapper[4799]: I0121 17:50:23.351047 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 21 17:50:23 crc kubenswrapper[4799]: I0121 17:50:23.358602 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 21 17:50:23 crc kubenswrapper[4799]: I0121 17:50:23.479749 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-68wt5" event={"ID":"05213e52-1f99-42a4-b882-4514760063c7","Type":"ContainerStarted","Data":"092b3bedb3fd77c6307fb0fea19ec77db8076f0c8153bbb4da3ab7c1e683fe3c"} Jan 21 17:50:23 crc kubenswrapper[4799]: I0121 17:50:23.482041 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"03a5694f-1e8b-490e-be8f-dce31bdd83c3","Type":"ContainerStarted","Data":"d18ca9012873ef22c48f7bd29f7fe503167022792fd44328ccf69dd39dbcf871"} Jan 21 17:50:23 crc kubenswrapper[4799]: I0121 17:50:23.874983 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 21 17:50:23 crc kubenswrapper[4799]: W0121 17:50:23.950333 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod58187703_2c52_4f99_8d9a_65306c90c5ed.slice/crio-92a1e21e7f30142d9eaca5a00a9a57bf5036f40584207b36bed92ab055cd3e97 WatchSource:0}: Error finding container 92a1e21e7f30142d9eaca5a00a9a57bf5036f40584207b36bed92ab055cd3e97: Status 404 returned error can't find the container with id 92a1e21e7f30142d9eaca5a00a9a57bf5036f40584207b36bed92ab055cd3e97 Jan 21 17:50:23 crc kubenswrapper[4799]: W0121 17:50:23.956333 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacea8227_6d95_4c5f_bba7_8e954701de28.slice/crio-fee53628aad6f392c41b548f6cc4b2afe9864229370c39d2628e7d9100d90c29 WatchSource:0}: Error finding container 
fee53628aad6f392c41b548f6cc4b2afe9864229370c39d2628e7d9100d90c29: Status 404 returned error can't find the container with id fee53628aad6f392c41b548f6cc4b2afe9864229370c39d2628e7d9100d90c29 Jan 21 17:50:23 crc kubenswrapper[4799]: W0121 17:50:23.957856 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d199dae_6bd1_48c4_8a95_25ffd4555e29.slice/crio-013a8e483c6682ae2bb91eff714be9f69c84abcd8ef674f08a51f2ee8e17ef9d WatchSource:0}: Error finding container 013a8e483c6682ae2bb91eff714be9f69c84abcd8ef674f08a51f2ee8e17ef9d: Status 404 returned error can't find the container with id 013a8e483c6682ae2bb91eff714be9f69c84abcd8ef674f08a51f2ee8e17ef9d Jan 21 17:50:23 crc kubenswrapper[4799]: W0121 17:50:23.960952 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc3dbd916_66ac_4f70_a011_68d4195c5c44.slice/crio-9d3e0550623e5faca9b9fa7854b55fba588614d2dbccfbd3cb8345d670ceea7b WatchSource:0}: Error finding container 9d3e0550623e5faca9b9fa7854b55fba588614d2dbccfbd3cb8345d670ceea7b: Status 404 returned error can't find the container with id 9d3e0550623e5faca9b9fa7854b55fba588614d2dbccfbd3cb8345d670ceea7b Jan 21 17:50:23 crc kubenswrapper[4799]: W0121 17:50:23.965511 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04f9c729_36bb_4aa5_9060_af5b0666b196.slice/crio-700424e46ef7040b6e874a13509f1046bacb76682fd12daa5fd8874bc7294244 WatchSource:0}: Error finding container 700424e46ef7040b6e874a13509f1046bacb76682fd12daa5fd8874bc7294244: Status 404 returned error can't find the container with id 700424e46ef7040b6e874a13509f1046bacb76682fd12daa5fd8874bc7294244 Jan 21 17:50:23 crc kubenswrapper[4799]: W0121 17:50:23.969389 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59e07a31_b75b_4e5b_827f_8ce5617a3810.slice/crio-9253eab859c8ffdc2c89d84dec239b80c07145d6df979aa6b65594e58b50f5b8 WatchSource:0}: Error finding container 9253eab859c8ffdc2c89d84dec239b80c07145d6df979aa6b65594e58b50f5b8: Status 404 returned error can't find the container with id 9253eab859c8ffdc2c89d84dec239b80c07145d6df979aa6b65594e58b50f5b8 Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.074974 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-546bf79c69-8qlv6" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.089984 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77479b959-cq54q" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.156311 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qr5zl\" (UniqueName: \"kubernetes.io/projected/e36cf457-3cc4-42c9-b5d8-46116a53f677-kube-api-access-qr5zl\") pod \"e36cf457-3cc4-42c9-b5d8-46116a53f677\" (UID: \"e36cf457-3cc4-42c9-b5d8-46116a53f677\") " Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.156382 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e36cf457-3cc4-42c9-b5d8-46116a53f677-dns-svc\") pod \"e36cf457-3cc4-42c9-b5d8-46116a53f677\" (UID: \"e36cf457-3cc4-42c9-b5d8-46116a53f677\") " Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.156434 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e36cf457-3cc4-42c9-b5d8-46116a53f677-config\") pod \"e36cf457-3cc4-42c9-b5d8-46116a53f677\" (UID: \"e36cf457-3cc4-42c9-b5d8-46116a53f677\") " Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.157246 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e36cf457-3cc4-42c9-b5d8-46116a53f677-config" (OuterVolumeSpecName: "config") pod "e36cf457-3cc4-42c9-b5d8-46116a53f677" (UID: "e36cf457-3cc4-42c9-b5d8-46116a53f677"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.158976 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e36cf457-3cc4-42c9-b5d8-46116a53f677-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e36cf457-3cc4-42c9-b5d8-46116a53f677" (UID: "e36cf457-3cc4-42c9-b5d8-46116a53f677"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.168609 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e36cf457-3cc4-42c9-b5d8-46116a53f677-kube-api-access-qr5zl" (OuterVolumeSpecName: "kube-api-access-qr5zl") pod "e36cf457-3cc4-42c9-b5d8-46116a53f677" (UID: "e36cf457-3cc4-42c9-b5d8-46116a53f677"). InnerVolumeSpecName "kube-api-access-qr5zl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.182674 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b8d888b5-8tgp5" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.280753 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55bfc7c3-0320-4612-90f9-0e30f960b573-config\") pod \"55bfc7c3-0320-4612-90f9-0e30f960b573\" (UID: \"55bfc7c3-0320-4612-90f9-0e30f960b573\") " Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.280864 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vv4q7\" (UniqueName: \"kubernetes.io/projected/55bfc7c3-0320-4612-90f9-0e30f960b573-kube-api-access-vv4q7\") pod \"55bfc7c3-0320-4612-90f9-0e30f960b573\" (UID: \"55bfc7c3-0320-4612-90f9-0e30f960b573\") " Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.281550 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55bfc7c3-0320-4612-90f9-0e30f960b573-config" (OuterVolumeSpecName: "config") pod "55bfc7c3-0320-4612-90f9-0e30f960b573" (UID: "55bfc7c3-0320-4612-90f9-0e30f960b573"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.282101 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qr5zl\" (UniqueName: \"kubernetes.io/projected/e36cf457-3cc4-42c9-b5d8-46116a53f677-kube-api-access-qr5zl\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.282183 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e36cf457-3cc4-42c9-b5d8-46116a53f677-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.282232 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e36cf457-3cc4-42c9-b5d8-46116a53f677-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.282552 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55bfc7c3-0320-4612-90f9-0e30f960b573-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.284325 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55bfc7c3-0320-4612-90f9-0e30f960b573-kube-api-access-vv4q7" (OuterVolumeSpecName: "kube-api-access-vv4q7") pod "55bfc7c3-0320-4612-90f9-0e30f960b573" (UID: "55bfc7c3-0320-4612-90f9-0e30f960b573"). InnerVolumeSpecName "kube-api-access-vv4q7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.383304 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c69c34b-8e93-4010-8753-7dcd05af8f2c-config\") pod \"2c69c34b-8e93-4010-8753-7dcd05af8f2c\" (UID: \"2c69c34b-8e93-4010-8753-7dcd05af8f2c\") " Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.383364 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5p24m\" (UniqueName: \"kubernetes.io/projected/2c69c34b-8e93-4010-8753-7dcd05af8f2c-kube-api-access-5p24m\") pod \"2c69c34b-8e93-4010-8753-7dcd05af8f2c\" (UID: \"2c69c34b-8e93-4010-8753-7dcd05af8f2c\") " Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.383434 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c69c34b-8e93-4010-8753-7dcd05af8f2c-dns-svc\") pod \"2c69c34b-8e93-4010-8753-7dcd05af8f2c\" (UID: \"2c69c34b-8e93-4010-8753-7dcd05af8f2c\") " Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.384058 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vv4q7\" (UniqueName: \"kubernetes.io/projected/55bfc7c3-0320-4612-90f9-0e30f960b573-kube-api-access-vv4q7\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.385008 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c69c34b-8e93-4010-8753-7dcd05af8f2c-config" (OuterVolumeSpecName: "config") pod "2c69c34b-8e93-4010-8753-7dcd05af8f2c" (UID: "2c69c34b-8e93-4010-8753-7dcd05af8f2c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.384788 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c69c34b-8e93-4010-8753-7dcd05af8f2c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2c69c34b-8e93-4010-8753-7dcd05af8f2c" (UID: "2c69c34b-8e93-4010-8753-7dcd05af8f2c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.397358 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c69c34b-8e93-4010-8753-7dcd05af8f2c-kube-api-access-5p24m" (OuterVolumeSpecName: "kube-api-access-5p24m") pod "2c69c34b-8e93-4010-8753-7dcd05af8f2c" (UID: "2c69c34b-8e93-4010-8753-7dcd05af8f2c"). InnerVolumeSpecName "kube-api-access-5p24m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.491159 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c69c34b-8e93-4010-8753-7dcd05af8f2c-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.491207 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c69c34b-8e93-4010-8753-7dcd05af8f2c-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.491220 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5p24m\" (UniqueName: \"kubernetes.io/projected/2c69c34b-8e93-4010-8753-7dcd05af8f2c-kube-api-access-5p24m\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.495782 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-546bf79c69-8qlv6" event={"ID":"e36cf457-3cc4-42c9-b5d8-46116a53f677","Type":"ContainerDied","Data":"c3bbf7536eb40601166385ce56f94d85a684191db359fcae3507abf9b1957da0"} Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.495920 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-546bf79c69-8qlv6" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.499087 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"04f9c729-36bb-4aa5-9060-af5b0666b196","Type":"ContainerStarted","Data":"700424e46ef7040b6e874a13509f1046bacb76682fd12daa5fd8874bc7294244"} Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.500559 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5dwpd" event={"ID":"0d199dae-6bd1-48c4-8a95-25ffd4555e29","Type":"ContainerStarted","Data":"013a8e483c6682ae2bb91eff714be9f69c84abcd8ef674f08a51f2ee8e17ef9d"} Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.503458 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8d888b5-8tgp5" event={"ID":"2c69c34b-8e93-4010-8753-7dcd05af8f2c","Type":"ContainerDied","Data":"4ed8a38c3cc28d46ee0d70f84f2621f2e2154a9da16549a4e74dc76ddfc9843d"} Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.503578 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b8d888b5-8tgp5" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.505098 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c3dbd916-66ac-4f70-a011-68d4195c5c44","Type":"ContainerStarted","Data":"9d3e0550623e5faca9b9fa7854b55fba588614d2dbccfbd3cb8345d670ceea7b"} Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.506361 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"acea8227-6d95-4c5f-bba7-8e954701de28","Type":"ContainerStarted","Data":"fee53628aad6f392c41b548f6cc4b2afe9864229370c39d2628e7d9100d90c29"} Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.508012 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"59e07a31-b75b-4e5b-827f-8ce5617a3810","Type":"ContainerStarted","Data":"9253eab859c8ffdc2c89d84dec239b80c07145d6df979aa6b65594e58b50f5b8"} Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.517451 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c75e85a7-0869-4fe4-ba35-d51f6107027c","Type":"ContainerStarted","Data":"d0fa546833fc0edd83c4df0585e3d309b0e4860ca46735c8c0c7f9ab56ef232b"} Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.517689 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.519717 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"63677f61-4283-417a-bcf7-303840452589","Type":"ContainerStarted","Data":"9867e8f83240539c65f23d7140689215880bd8f75bbaef31f30436d9f7a3455e"} Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.523570 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77479b959-cq54q" event={"ID":"55bfc7c3-0320-4612-90f9-0e30f960b573","Type":"ContainerDied","Data":"0920294bcdd33c2ef2a5bfa27d232b12471227d8f8d0021ce7652a2c837941b5"} Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.523666 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77479b959-cq54q" Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.525031 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"58187703-2c52-4f99-8d9a-65306c90c5ed","Type":"ContainerStarted","Data":"92a1e21e7f30142d9eaca5a00a9a57bf5036f40584207b36bed92ab055cd3e97"} Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.585669 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77479b959-cq54q"] Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.598894 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77479b959-cq54q"] Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.620101 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-546bf79c69-8qlv6"] Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.639403 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-546bf79c69-8qlv6"] Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.674091 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b8d888b5-8tgp5"] Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.682554 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b8d888b5-8tgp5"] Jan 21 17:50:24 crc kubenswrapper[4799]: I0121 17:50:24.684634 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=28.680383051 podStartE2EDuration="30.684590743s" podCreationTimestamp="2026-01-21 17:49:54 +0000 UTC" firstStartedPulling="2026-01-21 17:50:22.092088159 +0000 UTC m=+1048.718378182" lastFinishedPulling="2026-01-21 17:50:24.096295851 +0000 UTC m=+1050.722585874" observedRunningTime="2026-01-21 17:50:24.666226498 +0000 UTC m=+1051.292516541" watchObservedRunningTime="2026-01-21 17:50:24.684590743 +0000 UTC m=+1051.310880756" Jan 21 17:50:25 crc kubenswrapper[4799]: I0121 17:50:25.534417 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"63677f61-4283-417a-bcf7-303840452589","Type":"ContainerStarted","Data":"3681834a9f785cada47be88dfb5ed1ef26743bce72da0c4700f98cfe1e1f5a32"} Jan 21 17:50:25 crc kubenswrapper[4799]: I0121 17:50:25.971711 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:50:25 crc kubenswrapper[4799]: I0121 17:50:25.971819 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:50:26 crc kubenswrapper[4799]: I0121 17:50:26.216880 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c69c34b-8e93-4010-8753-7dcd05af8f2c" path="/var/lib/kubelet/pods/2c69c34b-8e93-4010-8753-7dcd05af8f2c/volumes" Jan 21 17:50:26 crc kubenswrapper[4799]: I0121 17:50:26.217343 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55bfc7c3-0320-4612-90f9-0e30f960b573" path="/var/lib/kubelet/pods/55bfc7c3-0320-4612-90f9-0e30f960b573/volumes" Jan 21 17:50:26 crc 
kubenswrapper[4799]: I0121 17:50:26.217752 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e36cf457-3cc4-42c9-b5d8-46116a53f677" path="/var/lib/kubelet/pods/e36cf457-3cc4-42c9-b5d8-46116a53f677/volumes" Jan 21 17:50:29 crc kubenswrapper[4799]: I0121 17:50:29.567543 4799 generic.go:334] "Generic (PLEG): container finished" podID="0d199dae-6bd1-48c4-8a95-25ffd4555e29" containerID="1b6a11888b3527d961ff1e12ec412d221904ebe31e3f05ad06329d3458342105" exitCode=0 Jan 21 17:50:29 crc kubenswrapper[4799]: I0121 17:50:29.567689 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5dwpd" event={"ID":"0d199dae-6bd1-48c4-8a95-25ffd4555e29","Type":"ContainerDied","Data":"1b6a11888b3527d961ff1e12ec412d221904ebe31e3f05ad06329d3458342105"} Jan 21 17:50:29 crc kubenswrapper[4799]: I0121 17:50:29.580698 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa","Type":"ContainerStarted","Data":"9d85f91d03abd760463fd52aa872cb91125a1c38fdd766926c69e6a5b7cea1bf"} Jan 21 17:50:29 crc kubenswrapper[4799]: I0121 17:50:29.583429 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"acea8227-6d95-4c5f-bba7-8e954701de28","Type":"ContainerStarted","Data":"ac3916e5d42a04be220432a302403b8001c7c2f6d8d484efca5afaac46a598ef"} Jan 21 17:50:29 crc kubenswrapper[4799]: I0121 17:50:29.589756 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-68wt5" event={"ID":"05213e52-1f99-42a4-b882-4514760063c7","Type":"ContainerStarted","Data":"0b983eacec1ffc60243ef4d81aaf2ae805b788cc38147d85994df4325a1de486"} Jan 21 17:50:29 crc kubenswrapper[4799]: I0121 17:50:29.590352 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-68wt5" Jan 21 17:50:29 crc kubenswrapper[4799]: I0121 17:50:29.591343 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Jan 21 17:50:29 crc kubenswrapper[4799]: I0121 17:50:29.603228 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"58187703-2c52-4f99-8d9a-65306c90c5ed","Type":"ContainerStarted","Data":"d1541111957ab3240224eaee17872b79b4ee04cf10d3c8e47f7796da802bb65c"} Jan 21 17:50:29 crc kubenswrapper[4799]: I0121 17:50:29.613518 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"04f9c729-36bb-4aa5-9060-af5b0666b196","Type":"ContainerStarted","Data":"af7ad0fef324f139b2237deaaacd749979e51abc2502765b99a23b5dd7f98068"} Jan 21 17:50:29 crc kubenswrapper[4799]: I0121 17:50:29.658438 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-68wt5" podStartSLOduration=24.973243883 podStartE2EDuration="30.658413602s" podCreationTimestamp="2026-01-21 17:49:59 +0000 UTC" firstStartedPulling="2026-01-21 17:50:22.908814794 +0000 UTC m=+1049.535104817" lastFinishedPulling="2026-01-21 17:50:28.593984513 +0000 UTC m=+1055.220274536" observedRunningTime="2026-01-21 17:50:29.643673409 +0000 UTC m=+1056.269963452" watchObservedRunningTime="2026-01-21 17:50:29.658413602 +0000 UTC m=+1056.284703635" Jan 21 17:50:31 crc kubenswrapper[4799]: I0121 17:50:31.633253 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" 
event={"ID":"c3dbd916-66ac-4f70-a011-68d4195c5c44","Type":"ContainerStarted","Data":"5f544d8b4e7c301812a7ecb1256ebbdbbadec5f876ac61b8d332b6d8f7b81167"} Jan 21 17:50:31 crc kubenswrapper[4799]: I0121 17:50:31.637725 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5dwpd" event={"ID":"0d199dae-6bd1-48c4-8a95-25ffd4555e29","Type":"ContainerStarted","Data":"c4d853787534e5f61de8a09f767d424e11f18706f583d378f9c0889e1483ae32"} Jan 21 17:50:32 crc kubenswrapper[4799]: I0121 17:50:32.687422 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"59e07a31-b75b-4e5b-827f-8ce5617a3810","Type":"ContainerStarted","Data":"dc252ed2b77d1a62a0ddc449129ad03216084a08aaf5803006b7b737fe9cd43c"} Jan 21 17:50:32 crc kubenswrapper[4799]: I0121 17:50:32.688286 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 21 17:50:32 crc kubenswrapper[4799]: I0121 17:50:32.718842 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=29.889086677999998 podStartE2EDuration="36.718809802s" podCreationTimestamp="2026-01-21 17:49:56 +0000 UTC" firstStartedPulling="2026-01-21 17:50:23.972544002 +0000 UTC m=+1050.598834025" lastFinishedPulling="2026-01-21 17:50:30.802267126 +0000 UTC m=+1057.428557149" observedRunningTime="2026-01-21 17:50:32.712638399 +0000 UTC m=+1059.338928432" watchObservedRunningTime="2026-01-21 17:50:32.718809802 +0000 UTC m=+1059.345099825" Jan 21 17:50:33 crc kubenswrapper[4799]: I0121 17:50:33.696626 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5dwpd" event={"ID":"0d199dae-6bd1-48c4-8a95-25ffd4555e29","Type":"ContainerStarted","Data":"860c5a8cc4f1faf633a6a1c55a72f671f6c5e542b9dd1fcb89103d2dff8ccf8a"} Jan 21 17:50:33 crc kubenswrapper[4799]: I0121 17:50:33.696980 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:50:33 crc kubenswrapper[4799]: I0121 17:50:33.697001 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:50:33 crc kubenswrapper[4799]: I0121 17:50:33.700668 4799 generic.go:334] "Generic (PLEG): container finished" podID="e95ea0b2-ade1-4aaa-ad67-b85ebde84afa" containerID="9d85f91d03abd760463fd52aa872cb91125a1c38fdd766926c69e6a5b7cea1bf" exitCode=0 Jan 21 17:50:33 crc kubenswrapper[4799]: I0121 17:50:33.700758 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa","Type":"ContainerDied","Data":"9d85f91d03abd760463fd52aa872cb91125a1c38fdd766926c69e6a5b7cea1bf"} Jan 21 17:50:33 crc kubenswrapper[4799]: I0121 17:50:33.703430 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"acea8227-6d95-4c5f-bba7-8e954701de28","Type":"ContainerStarted","Data":"7e070c9755e5ad9af8668c00b5bf480017de2af7960a85618b4e4f84d680cbc7"} Jan 21 17:50:33 crc kubenswrapper[4799]: I0121 17:50:33.705111 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"58187703-2c52-4f99-8d9a-65306c90c5ed","Type":"ContainerStarted","Data":"6464bc19bbddde9db4e42cd8fc89633f7d21592af757226a8561072a8edc68aa"} Jan 21 17:50:33 crc kubenswrapper[4799]: I0121 17:50:33.735507 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/ovn-controller-ovs-5dwpd" podStartSLOduration=30.099885395 podStartE2EDuration="34.735483722s" podCreationTimestamp="2026-01-21 17:49:59 +0000 UTC" firstStartedPulling="2026-01-21 17:50:23.959824436 +0000 UTC m=+1050.586114459" lastFinishedPulling="2026-01-21 17:50:28.595422763 +0000 UTC m=+1055.221712786" observedRunningTime="2026-01-21 17:50:33.728880357 +0000 UTC m=+1060.355170390" watchObservedRunningTime="2026-01-21 17:50:33.735483722 +0000 UTC m=+1060.361773745" Jan 21 17:50:33 crc kubenswrapper[4799]: I0121 17:50:33.754187 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=23.126505211 podStartE2EDuration="31.754165926s" podCreationTimestamp="2026-01-21 17:50:02 +0000 UTC" firstStartedPulling="2026-01-21 17:50:23.959763214 +0000 UTC m=+1050.586053237" lastFinishedPulling="2026-01-21 17:50:32.587423939 +0000 UTC m=+1059.213713952" observedRunningTime="2026-01-21 17:50:33.751521742 +0000 UTC m=+1060.377811775" watchObservedRunningTime="2026-01-21 17:50:33.754165926 +0000 UTC m=+1060.380455959" Jan 21 17:50:33 crc kubenswrapper[4799]: I0121 17:50:33.783085 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=26.145689128 podStartE2EDuration="34.783064476s" podCreationTimestamp="2026-01-21 17:49:59 +0000 UTC" firstStartedPulling="2026-01-21 17:50:23.953033065 +0000 UTC m=+1050.579323088" lastFinishedPulling="2026-01-21 17:50:32.590408413 +0000 UTC m=+1059.216698436" observedRunningTime="2026-01-21 17:50:33.772286434 +0000 UTC m=+1060.398576457" watchObservedRunningTime="2026-01-21 17:50:33.783064476 +0000 UTC m=+1060.409354499" Jan 21 17:50:33 crc kubenswrapper[4799]: I0121 17:50:33.932186 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:33 crc kubenswrapper[4799]: I0121 17:50:33.969907 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:34 crc kubenswrapper[4799]: I0121 17:50:34.092801 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:34 crc kubenswrapper[4799]: I0121 17:50:34.092857 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:34 crc kubenswrapper[4799]: I0121 17:50:34.129926 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:34 crc kubenswrapper[4799]: I0121 17:50:34.713924 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"48f0f966-0779-4959-884e-eae4ed66e969","Type":"ContainerStarted","Data":"c44dea80f4b6b10d56559fe49cb3b1af988bd74e190232574355f35b1495761d"} Jan 21 17:50:34 crc kubenswrapper[4799]: I0121 17:50:34.716534 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e95ea0b2-ade1-4aaa-ad67-b85ebde84afa","Type":"ContainerStarted","Data":"956f2f87263ad4f4da793c3b5d5b476983b5085c7ac73c61352b7905498add8a"} Jan 21 17:50:34 crc kubenswrapper[4799]: I0121 17:50:34.718917 4799 generic.go:334] "Generic (PLEG): container finished" podID="04f9c729-36bb-4aa5-9060-af5b0666b196" containerID="af7ad0fef324f139b2237deaaacd749979e51abc2502765b99a23b5dd7f98068" exitCode=0 Jan 21 17:50:34 crc kubenswrapper[4799]: I0121 17:50:34.719055 4799 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/openstack-galera-0" event={"ID":"04f9c729-36bb-4aa5-9060-af5b0666b196","Type":"ContainerDied","Data":"af7ad0fef324f139b2237deaaacd749979e51abc2502765b99a23b5dd7f98068"} Jan 21 17:50:34 crc kubenswrapper[4799]: I0121 17:50:34.720266 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:34 crc kubenswrapper[4799]: I0121 17:50:34.774607 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Jan 21 17:50:34 crc kubenswrapper[4799]: I0121 17:50:34.780286 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Jan 21 17:50:34 crc kubenswrapper[4799]: I0121 17:50:34.819914 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=36.314818787 podStartE2EDuration="42.81989123s" podCreationTimestamp="2026-01-21 17:49:52 +0000 UTC" firstStartedPulling="2026-01-21 17:50:22.089111295 +0000 UTC m=+1048.715401328" lastFinishedPulling="2026-01-21 17:50:28.594183748 +0000 UTC m=+1055.220473771" observedRunningTime="2026-01-21 17:50:34.814681264 +0000 UTC m=+1061.440971287" watchObservedRunningTime="2026-01-21 17:50:34.81989123 +0000 UTC m=+1061.446181253" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.107180 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6749c445df-26v2l"] Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.219242 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78bf94944f-splp6"] Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.241870 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.246532 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.270267 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-bxtjr"] Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.276090 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.285352 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78bf94944f-splp6"] Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.298374 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.299362 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-bxtjr"] Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.309353 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcpdn\" (UniqueName: \"kubernetes.io/projected/b117b068-807b-4c10-8c30-46648892f87f-kube-api-access-qcpdn\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.309474 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-dns-svc\") pod \"dnsmasq-dns-78bf94944f-splp6\" (UID: \"6266e3d5-e453-43de-9353-84c2d23c23ea\") " pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.309507 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/b117b068-807b-4c10-8c30-46648892f87f-ovn-rundir\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.309558 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-config\") pod \"dnsmasq-dns-78bf94944f-splp6\" (UID: \"6266e3d5-e453-43de-9353-84c2d23c23ea\") " pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.309595 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b117b068-807b-4c10-8c30-46648892f87f-combined-ca-bundle\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.309668 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b117b068-807b-4c10-8c30-46648892f87f-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.309700 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b117b068-807b-4c10-8c30-46648892f87f-config\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.309733 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-ovsdbserver-sb\") pod \"dnsmasq-dns-78bf94944f-splp6\" (UID: \"6266e3d5-e453-43de-9353-84c2d23c23ea\") " pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.309777 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqvhs\" (UniqueName: \"kubernetes.io/projected/6266e3d5-e453-43de-9353-84c2d23c23ea-kube-api-access-sqvhs\") pod \"dnsmasq-dns-78bf94944f-splp6\" (UID: \"6266e3d5-e453-43de-9353-84c2d23c23ea\") " pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.309806 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/b117b068-807b-4c10-8c30-46648892f87f-ovs-rundir\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.316686 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.321109 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.326118 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-9dlgm" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.326178 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.326577 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.335411 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.335618 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.391410 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-99796b587-4m4v4"] Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.416911 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/b117b068-807b-4c10-8c30-46648892f87f-ovn-rundir\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.416953 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e983d617-7cd1-416a-8955-c3d755e4a5b0-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.416987 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-config\") pod \"dnsmasq-dns-78bf94944f-splp6\" (UID: \"6266e3d5-e453-43de-9353-84c2d23c23ea\") " pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:35 crc 
kubenswrapper[4799]: I0121 17:50:35.417007 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e983d617-7cd1-416a-8955-c3d755e4a5b0-config\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.417031 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e983d617-7cd1-416a-8955-c3d755e4a5b0-scripts\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.417049 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e983d617-7cd1-416a-8955-c3d755e4a5b0-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.417067 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b117b068-807b-4c10-8c30-46648892f87f-combined-ca-bundle\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.417156 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2cfd\" (UniqueName: \"kubernetes.io/projected/e983d617-7cd1-416a-8955-c3d755e4a5b0-kube-api-access-t2cfd\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.417178 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b117b068-807b-4c10-8c30-46648892f87f-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.417201 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b117b068-807b-4c10-8c30-46648892f87f-config\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.417227 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-ovsdbserver-sb\") pod \"dnsmasq-dns-78bf94944f-splp6\" (UID: \"6266e3d5-e453-43de-9353-84c2d23c23ea\") " pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.417255 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqvhs\" (UniqueName: \"kubernetes.io/projected/6266e3d5-e453-43de-9353-84c2d23c23ea-kube-api-access-sqvhs\") pod \"dnsmasq-dns-78bf94944f-splp6\" (UID: \"6266e3d5-e453-43de-9353-84c2d23c23ea\") " pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.417272 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e983d617-7cd1-416a-8955-c3d755e4a5b0-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.417296 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/b117b068-807b-4c10-8c30-46648892f87f-ovs-rundir\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.417322 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcpdn\" (UniqueName: \"kubernetes.io/projected/b117b068-807b-4c10-8c30-46648892f87f-kube-api-access-qcpdn\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.417354 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e983d617-7cd1-416a-8955-c3d755e4a5b0-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.417387 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-dns-svc\") pod \"dnsmasq-dns-78bf94944f-splp6\" (UID: \"6266e3d5-e453-43de-9353-84c2d23c23ea\") " pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.418804 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/b117b068-807b-4c10-8c30-46648892f87f-ovn-rundir\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.418851 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-config\") pod \"dnsmasq-dns-78bf94944f-splp6\" (UID: \"6266e3d5-e453-43de-9353-84c2d23c23ea\") " pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.419112 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-dns-svc\") pod \"dnsmasq-dns-78bf94944f-splp6\" (UID: \"6266e3d5-e453-43de-9353-84c2d23c23ea\") " pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.419224 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/b117b068-807b-4c10-8c30-46648892f87f-ovs-rundir\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.419721 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b117b068-807b-4c10-8c30-46648892f87f-config\") pod 
\"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.420168 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-ovsdbserver-sb\") pod \"dnsmasq-dns-78bf94944f-splp6\" (UID: \"6266e3d5-e453-43de-9353-84c2d23c23ea\") " pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.429581 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b117b068-807b-4c10-8c30-46648892f87f-combined-ca-bundle\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.436904 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b117b068-807b-4c10-8c30-46648892f87f-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.437072 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f6d45dc65-7z99n"] Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.442061 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqvhs\" (UniqueName: \"kubernetes.io/projected/6266e3d5-e453-43de-9353-84c2d23c23ea-kube-api-access-sqvhs\") pod \"dnsmasq-dns-78bf94944f-splp6\" (UID: \"6266e3d5-e453-43de-9353-84c2d23c23ea\") " pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.443871 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f6d45dc65-7z99n"] Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.444012 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.451746 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.464830 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcpdn\" (UniqueName: \"kubernetes.io/projected/b117b068-807b-4c10-8c30-46648892f87f-kube-api-access-qcpdn\") pod \"ovn-controller-metrics-bxtjr\" (UID: \"b117b068-807b-4c10-8c30-46648892f87f\") " pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.520188 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-ovsdbserver-sb\") pod \"dnsmasq-dns-5f6d45dc65-7z99n\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.520675 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e983d617-7cd1-416a-8955-c3d755e4a5b0-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.520917 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7krgl\" (UniqueName: \"kubernetes.io/projected/4b8250d4-2239-43f2-ba4e-2b18eda69202-kube-api-access-7krgl\") pod \"dnsmasq-dns-5f6d45dc65-7z99n\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.521102 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e983d617-7cd1-416a-8955-c3d755e4a5b0-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.521274 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-dns-svc\") pod \"dnsmasq-dns-5f6d45dc65-7z99n\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.521381 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-ovsdbserver-nb\") pod \"dnsmasq-dns-5f6d45dc65-7z99n\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.521566 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e983d617-7cd1-416a-8955-c3d755e4a5b0-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.521694 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e983d617-7cd1-416a-8955-c3d755e4a5b0-config\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.521816 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e983d617-7cd1-416a-8955-c3d755e4a5b0-scripts\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.521941 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e983d617-7cd1-416a-8955-c3d755e4a5b0-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.522073 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-config\") pod \"dnsmasq-dns-5f6d45dc65-7z99n\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.522251 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2cfd\" (UniqueName: \"kubernetes.io/projected/e983d617-7cd1-416a-8955-c3d755e4a5b0-kube-api-access-t2cfd\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.523444 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e983d617-7cd1-416a-8955-c3d755e4a5b0-config\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.524114 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e983d617-7cd1-416a-8955-c3d755e4a5b0-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.525076 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e983d617-7cd1-416a-8955-c3d755e4a5b0-scripts\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.527077 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e983d617-7cd1-416a-8955-c3d755e4a5b0-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.529297 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e983d617-7cd1-416a-8955-c3d755e4a5b0-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.529577 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/e983d617-7cd1-416a-8955-c3d755e4a5b0-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.544679 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2cfd\" (UniqueName: \"kubernetes.io/projected/e983d617-7cd1-416a-8955-c3d755e4a5b0-kube-api-access-t2cfd\") pod \"ovn-northd-0\" (UID: \"e983d617-7cd1-416a-8955-c3d755e4a5b0\") " pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.578298 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6749c445df-26v2l" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.594675 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.621889 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-bxtjr" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.623022 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/444bcff7-378c-48a7-8f97-d637df27d7e1-dns-svc\") pod \"444bcff7-378c-48a7-8f97-d637df27d7e1\" (UID: \"444bcff7-378c-48a7-8f97-d637df27d7e1\") " Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.623110 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckhhd\" (UniqueName: \"kubernetes.io/projected/444bcff7-378c-48a7-8f97-d637df27d7e1-kube-api-access-ckhhd\") pod \"444bcff7-378c-48a7-8f97-d637df27d7e1\" (UID: \"444bcff7-378c-48a7-8f97-d637df27d7e1\") " Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.623178 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/444bcff7-378c-48a7-8f97-d637df27d7e1-config\") pod \"444bcff7-378c-48a7-8f97-d637df27d7e1\" (UID: \"444bcff7-378c-48a7-8f97-d637df27d7e1\") " Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.623530 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-ovsdbserver-sb\") pod \"dnsmasq-dns-5f6d45dc65-7z99n\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.623586 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7krgl\" (UniqueName: \"kubernetes.io/projected/4b8250d4-2239-43f2-ba4e-2b18eda69202-kube-api-access-7krgl\") pod \"dnsmasq-dns-5f6d45dc65-7z99n\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.623621 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-dns-svc\") pod \"dnsmasq-dns-5f6d45dc65-7z99n\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.623641 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-ovsdbserver-nb\") pod \"dnsmasq-dns-5f6d45dc65-7z99n\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.623701 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-config\") pod \"dnsmasq-dns-5f6d45dc65-7z99n\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.623931 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/444bcff7-378c-48a7-8f97-d637df27d7e1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "444bcff7-378c-48a7-8f97-d637df27d7e1" (UID: "444bcff7-378c-48a7-8f97-d637df27d7e1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.624474 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/444bcff7-378c-48a7-8f97-d637df27d7e1-config" (OuterVolumeSpecName: "config") pod "444bcff7-378c-48a7-8f97-d637df27d7e1" (UID: "444bcff7-378c-48a7-8f97-d637df27d7e1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.624531 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-config\") pod \"dnsmasq-dns-5f6d45dc65-7z99n\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.625905 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-ovsdbserver-sb\") pod \"dnsmasq-dns-5f6d45dc65-7z99n\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.625937 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-dns-svc\") pod \"dnsmasq-dns-5f6d45dc65-7z99n\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.625983 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-ovsdbserver-nb\") pod \"dnsmasq-dns-5f6d45dc65-7z99n\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.628547 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/444bcff7-378c-48a7-8f97-d637df27d7e1-kube-api-access-ckhhd" (OuterVolumeSpecName: "kube-api-access-ckhhd") pod "444bcff7-378c-48a7-8f97-d637df27d7e1" (UID: "444bcff7-378c-48a7-8f97-d637df27d7e1"). InnerVolumeSpecName "kube-api-access-ckhhd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.653889 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7krgl\" (UniqueName: \"kubernetes.io/projected/4b8250d4-2239-43f2-ba4e-2b18eda69202-kube-api-access-7krgl\") pod \"dnsmasq-dns-5f6d45dc65-7z99n\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.662176 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.724565 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-99796b587-4m4v4" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.736183 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6749c445df-26v2l" event={"ID":"444bcff7-378c-48a7-8f97-d637df27d7e1","Type":"ContainerDied","Data":"8e8977c59d51653ec0eb19843038313aa1a6a74a8f5ae332cb7365f42154fccd"} Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.736271 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6749c445df-26v2l" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.752911 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-99796b587-4m4v4" event={"ID":"5cd788d2-0579-43c0-a7ef-a22ff27d8e13","Type":"ContainerDied","Data":"209a4a41c8313172d1978b44078741105600902b6f0737aa4e1515e21da57f4e"} Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.753016 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-99796b587-4m4v4" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.755875 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"04f9c729-36bb-4aa5-9060-af5b0666b196","Type":"ContainerStarted","Data":"347c4f00516dd4cb471be99d0ff9246d329e670e7f9d202805ecc19e233ecdce"} Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.774078 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/444bcff7-378c-48a7-8f97-d637df27d7e1-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.774119 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckhhd\" (UniqueName: \"kubernetes.io/projected/444bcff7-378c-48a7-8f97-d637df27d7e1-kube-api-access-ckhhd\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.774146 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/444bcff7-378c-48a7-8f97-d637df27d7e1-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.822626 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.875486 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-dns-svc\") pod \"5cd788d2-0579-43c0-a7ef-a22ff27d8e13\" (UID: \"5cd788d2-0579-43c0-a7ef-a22ff27d8e13\") " Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.875655 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-config\") pod \"5cd788d2-0579-43c0-a7ef-a22ff27d8e13\" (UID: \"5cd788d2-0579-43c0-a7ef-a22ff27d8e13\") " Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.875742 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95dz2\" (UniqueName: \"kubernetes.io/projected/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-kube-api-access-95dz2\") pod \"5cd788d2-0579-43c0-a7ef-a22ff27d8e13\" (UID: \"5cd788d2-0579-43c0-a7ef-a22ff27d8e13\") " Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.877713 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-config" (OuterVolumeSpecName: "config") pod "5cd788d2-0579-43c0-a7ef-a22ff27d8e13" (UID: "5cd788d2-0579-43c0-a7ef-a22ff27d8e13"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.877855 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5cd788d2-0579-43c0-a7ef-a22ff27d8e13" (UID: "5cd788d2-0579-43c0-a7ef-a22ff27d8e13"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.879588 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6749c445df-26v2l"] Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.886962 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-kube-api-access-95dz2" (OuterVolumeSpecName: "kube-api-access-95dz2") pod "5cd788d2-0579-43c0-a7ef-a22ff27d8e13" (UID: "5cd788d2-0579-43c0-a7ef-a22ff27d8e13"). InnerVolumeSpecName "kube-api-access-95dz2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.893506 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6749c445df-26v2l"] Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.907310 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=40.202600239 podStartE2EDuration="44.907280493s" podCreationTimestamp="2026-01-21 17:49:51 +0000 UTC" firstStartedPulling="2026-01-21 17:50:23.969366243 +0000 UTC m=+1050.595656266" lastFinishedPulling="2026-01-21 17:50:28.674046497 +0000 UTC m=+1055.300336520" observedRunningTime="2026-01-21 17:50:35.906669476 +0000 UTC m=+1062.532959509" watchObservedRunningTime="2026-01-21 17:50:35.907280493 +0000 UTC m=+1062.533570516" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.977721 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.977765 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95dz2\" (UniqueName: \"kubernetes.io/projected/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-kube-api-access-95dz2\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:35 crc kubenswrapper[4799]: I0121 17:50:35.977777 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5cd788d2-0579-43c0-a7ef-a22ff27d8e13-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:36 crc kubenswrapper[4799]: I0121 17:50:36.112701 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-99796b587-4m4v4"] Jan 21 17:50:36 crc kubenswrapper[4799]: I0121 17:50:36.122516 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-99796b587-4m4v4"] Jan 21 17:50:36 crc kubenswrapper[4799]: I0121 17:50:36.215368 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="444bcff7-378c-48a7-8f97-d637df27d7e1" path="/var/lib/kubelet/pods/444bcff7-378c-48a7-8f97-d637df27d7e1/volumes" Jan 21 17:50:36 crc kubenswrapper[4799]: I0121 17:50:36.215742 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cd788d2-0579-43c0-a7ef-a22ff27d8e13" path="/var/lib/kubelet/pods/5cd788d2-0579-43c0-a7ef-a22ff27d8e13/volumes" Jan 21 17:50:36 crc kubenswrapper[4799]: I0121 17:50:36.285828 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78bf94944f-splp6"] Jan 21 17:50:36 crc kubenswrapper[4799]: I0121 17:50:36.362007 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 21 17:50:36 crc kubenswrapper[4799]: W0121 17:50:36.386365 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode983d617_7cd1_416a_8955_c3d755e4a5b0.slice/crio-d1d7456e20703d7e6150a8d507f60c23746372ccf0ae0a6180cd1a9a35627bc7 WatchSource:0}: Error finding container d1d7456e20703d7e6150a8d507f60c23746372ccf0ae0a6180cd1a9a35627bc7: Status 404 returned error can't find the container with id d1d7456e20703d7e6150a8d507f60c23746372ccf0ae0a6180cd1a9a35627bc7 Jan 21 17:50:36 crc kubenswrapper[4799]: I0121 17:50:36.433508 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-bxtjr"] Jan 21 17:50:36 crc kubenswrapper[4799]: I0121 17:50:36.489234 4799 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f6d45dc65-7z99n"] Jan 21 17:50:36 crc kubenswrapper[4799]: I0121 17:50:36.769120 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-bxtjr" event={"ID":"b117b068-807b-4c10-8c30-46648892f87f","Type":"ContainerStarted","Data":"724a1e6fc15823bf33cffbd985494ccda9e13f368af356899b04547cd05d113f"} Jan 21 17:50:36 crc kubenswrapper[4799]: I0121 17:50:36.771878 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78bf94944f-splp6" event={"ID":"6266e3d5-e453-43de-9353-84c2d23c23ea","Type":"ContainerStarted","Data":"95cf33382a1d04bf26f3e8044d30a9526342e71d201301b256293c34929cd5ae"} Jan 21 17:50:36 crc kubenswrapper[4799]: I0121 17:50:36.771949 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78bf94944f-splp6" event={"ID":"6266e3d5-e453-43de-9353-84c2d23c23ea","Type":"ContainerStarted","Data":"c3bf44a27069777180a70d5427e689950b15c91d6c116d52dff42fba7ce1fa6f"} Jan 21 17:50:36 crc kubenswrapper[4799]: I0121 17:50:36.778770 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" event={"ID":"4b8250d4-2239-43f2-ba4e-2b18eda69202","Type":"ContainerStarted","Data":"6fa23a265b8db7a5e76627d2039f964cbba4ff4eb0e59af76a5c5cc4b19cf7c9"} Jan 21 17:50:36 crc kubenswrapper[4799]: I0121 17:50:36.778828 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" event={"ID":"4b8250d4-2239-43f2-ba4e-2b18eda69202","Type":"ContainerStarted","Data":"56f99ed86ca1ec8c13b175431fa4964f5638c69f0aad20f47bca44f97d518739"} Jan 21 17:50:36 crc kubenswrapper[4799]: I0121 17:50:36.782825 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e983d617-7cd1-416a-8955-c3d755e4a5b0","Type":"ContainerStarted","Data":"d1d7456e20703d7e6150a8d507f60c23746372ccf0ae0a6180cd1a9a35627bc7"} Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.041000 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f6d45dc65-7z99n"] Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.109579 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-648b6fc9cc-db49n"] Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.111400 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.124958 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-648b6fc9cc-db49n"] Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.210921 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-dns-svc\") pod \"dnsmasq-dns-648b6fc9cc-db49n\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.211015 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-ovsdbserver-sb\") pod \"dnsmasq-dns-648b6fc9cc-db49n\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.211058 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6q5xj\" (UniqueName: \"kubernetes.io/projected/6df31e83-1f86-4266-be9f-7e8dbfd25922-kube-api-access-6q5xj\") pod \"dnsmasq-dns-648b6fc9cc-db49n\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.211103 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-ovsdbserver-nb\") pod \"dnsmasq-dns-648b6fc9cc-db49n\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.211379 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-config\") pod \"dnsmasq-dns-648b6fc9cc-db49n\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.313658 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-config\") pod \"dnsmasq-dns-648b6fc9cc-db49n\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.313749 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-dns-svc\") pod \"dnsmasq-dns-648b6fc9cc-db49n\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.313840 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-ovsdbserver-sb\") pod \"dnsmasq-dns-648b6fc9cc-db49n\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.313897 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-6q5xj\" (UniqueName: \"kubernetes.io/projected/6df31e83-1f86-4266-be9f-7e8dbfd25922-kube-api-access-6q5xj\") pod \"dnsmasq-dns-648b6fc9cc-db49n\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.313967 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-ovsdbserver-nb\") pod \"dnsmasq-dns-648b6fc9cc-db49n\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.315737 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-ovsdbserver-nb\") pod \"dnsmasq-dns-648b6fc9cc-db49n\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.315911 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-ovsdbserver-sb\") pod \"dnsmasq-dns-648b6fc9cc-db49n\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.316736 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-config\") pod \"dnsmasq-dns-648b6fc9cc-db49n\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.316809 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-dns-svc\") pod \"dnsmasq-dns-648b6fc9cc-db49n\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.336317 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6q5xj\" (UniqueName: \"kubernetes.io/projected/6df31e83-1f86-4266-be9f-7e8dbfd25922-kube-api-access-6q5xj\") pod \"dnsmasq-dns-648b6fc9cc-db49n\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.432283 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.799667 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-bxtjr" event={"ID":"b117b068-807b-4c10-8c30-46648892f87f","Type":"ContainerStarted","Data":"96186029e188c0d2df14b5e7643ec79a09b7a9ba9c00feaa222fe3648aac04aa"} Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.802671 4799 generic.go:334] "Generic (PLEG): container finished" podID="6266e3d5-e453-43de-9353-84c2d23c23ea" containerID="95cf33382a1d04bf26f3e8044d30a9526342e71d201301b256293c34929cd5ae" exitCode=0 Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.802760 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78bf94944f-splp6" event={"ID":"6266e3d5-e453-43de-9353-84c2d23c23ea","Type":"ContainerDied","Data":"95cf33382a1d04bf26f3e8044d30a9526342e71d201301b256293c34929cd5ae"} Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.802788 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78bf94944f-splp6" event={"ID":"6266e3d5-e453-43de-9353-84c2d23c23ea","Type":"ContainerStarted","Data":"2482768cdb5c44f0fc160dd31ab4420eb5a058bfef55b21537971c159f83f351"} Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.804034 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.806864 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" event={"ID":"4b8250d4-2239-43f2-ba4e-2b18eda69202","Type":"ContainerDied","Data":"6fa23a265b8db7a5e76627d2039f964cbba4ff4eb0e59af76a5c5cc4b19cf7c9"} Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.806907 4799 generic.go:334] "Generic (PLEG): container finished" podID="4b8250d4-2239-43f2-ba4e-2b18eda69202" containerID="6fa23a265b8db7a5e76627d2039f964cbba4ff4eb0e59af76a5c5cc4b19cf7c9" exitCode=0 Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.812369 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e983d617-7cd1-416a-8955-c3d755e4a5b0","Type":"ContainerStarted","Data":"61802716fb8e292f9cca99e1ba965a3777cf1800c0a084491a02f0e08ceb62ae"} Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.834298 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-bxtjr" podStartSLOduration=2.834280291 podStartE2EDuration="2.834280291s" podCreationTimestamp="2026-01-21 17:50:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:50:37.827304675 +0000 UTC m=+1064.453594708" watchObservedRunningTime="2026-01-21 17:50:37.834280291 +0000 UTC m=+1064.460570314" Jan 21 17:50:37 crc kubenswrapper[4799]: I0121 17:50:37.883949 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78bf94944f-splp6" podStartSLOduration=2.791904273 podStartE2EDuration="2.883926593s" podCreationTimestamp="2026-01-21 17:50:35 +0000 UTC" firstStartedPulling="2026-01-21 17:50:36.309619271 +0000 UTC m=+1062.935909294" lastFinishedPulling="2026-01-21 17:50:36.401641591 +0000 UTC m=+1063.027931614" observedRunningTime="2026-01-21 17:50:37.856301468 +0000 UTC m=+1064.482591491" watchObservedRunningTime="2026-01-21 17:50:37.883926593 +0000 UTC m=+1064.510216616" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 
17:50:38.003462 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-648b6fc9cc-db49n"] Jan 21 17:50:38 crc kubenswrapper[4799]: E0121 17:50:38.079278 4799 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Jan 21 17:50:38 crc kubenswrapper[4799]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/4b8250d4-2239-43f2-ba4e-2b18eda69202/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Jan 21 17:50:38 crc kubenswrapper[4799]: > podSandboxID="56f99ed86ca1ec8c13b175431fa4964f5638c69f0aad20f47bca44f97d518739" Jan 21 17:50:38 crc kubenswrapper[4799]: E0121 17:50:38.080010 4799 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 21 17:50:38 crc kubenswrapper[4799]: container &Container{Name:dnsmasq-dns,Image:38.102.83.30:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n7bh64fh67ch5c4h65bh587h67fh546h7bhc4h688h596h5c7h554h99h8h5dch586h7h5cbh686h55h64bh7dhdbhb6h575h65ch654h658h688h65bq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7krgl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 
},Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5f6d45dc65-7z99n_openstack(4b8250d4-2239-43f2-ba4e-2b18eda69202): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/4b8250d4-2239-43f2-ba4e-2b18eda69202/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Jan 21 17:50:38 crc kubenswrapper[4799]: > logger="UnhandledError" Jan 21 17:50:38 crc kubenswrapper[4799]: E0121 17:50:38.081195 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/4b8250d4-2239-43f2-ba4e-2b18eda69202/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" podUID="4b8250d4-2239-43f2-ba4e-2b18eda69202" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.239122 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.244712 4799 util.go:30] "No sandbox for pod can be found. 
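
[Annotation] The CreateContainerError above is a subPath failure: when a VolumeMount sets SubPath, the kubelet prepares a bind-mount source under /var/lib/kubelet/pods/<uid>/volume-subpaths/<volume>/<container>/<n> (here .../dns-svc/dnsmasq-dns/1, as quoted in the error), and that source was already gone by the time the runtime tried to use it. This lines up with the SyncLoop DELETE for dnsmasq-dns-5f6d45dc65-7z99n arriving about a second before the create attempt. The failing mount, restated from the container-spec dump in the error as a sketch with k8s.io/api types:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// Field values copied from the VolumeMount dump in the error above.
	m := corev1.VolumeMount{
		Name:      "dns-svc",
		ReadOnly:  true,
		MountPath: "/etc/dnsmasq.d/hosts/dns-svc",
		SubPath:   "dns-svc",
	}
	fmt.Printf("%+v\n", m)
}
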
Need to start a new one" pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.247283 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.247842 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.247998 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.248510 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-zhwp6" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.300318 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.339912 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.339964 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/771ea47a-76eb-434d-ac1f-cf6048f08237-lock\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.340188 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2krz\" (UniqueName: \"kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-kube-api-access-v2krz\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.340435 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.340648 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/771ea47a-76eb-434d-ac1f-cf6048f08237-cache\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.442388 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.442768 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.442826 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/771ea47a-76eb-434d-ac1f-cf6048f08237-cache\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.442919 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.442951 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/771ea47a-76eb-434d-ac1f-cf6048f08237-lock\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.442984 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2krz\" (UniqueName: \"kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-kube-api-access-v2krz\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: E0121 17:50:38.443114 4799 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 21 17:50:38 crc kubenswrapper[4799]: E0121 17:50:38.443154 4799 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 21 17:50:38 crc kubenswrapper[4799]: E0121 17:50:38.443267 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift podName:771ea47a-76eb-434d-ac1f-cf6048f08237 nodeName:}" failed. No retries permitted until 2026-01-21 17:50:38.943236292 +0000 UTC m=+1065.569526315 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift") pod "swift-storage-0" (UID: "771ea47a-76eb-434d-ac1f-cf6048f08237") : configmap "swift-ring-files" not found Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.443426 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/771ea47a-76eb-434d-ac1f-cf6048f08237-cache\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.443521 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/771ea47a-76eb-434d-ac1f-cf6048f08237-lock\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.468841 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2krz\" (UniqueName: \"kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-kube-api-access-v2krz\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.473064 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.823582 4799 generic.go:334] "Generic (PLEG): container finished" podID="6df31e83-1f86-4266-be9f-7e8dbfd25922" containerID="72125dca36af8bb1595f9e481e26db8856da0c5ddea192593eebbc6397381ffa" exitCode=0 Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.823666 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" event={"ID":"6df31e83-1f86-4266-be9f-7e8dbfd25922","Type":"ContainerDied","Data":"72125dca36af8bb1595f9e481e26db8856da0c5ddea192593eebbc6397381ffa"} Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.823721 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" event={"ID":"6df31e83-1f86-4266-be9f-7e8dbfd25922","Type":"ContainerStarted","Data":"e5290dcdcab23dc63be0e09fbdd695b3a75d071b8d0dbe882deefd5fbdc7a4cb"} Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.826155 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e983d617-7cd1-416a-8955-c3d755e4a5b0","Type":"ContainerStarted","Data":"f103020643119dacf85e15e788b1d2b09fe80e0f6ba76f8f83a8bf7aa54e97b7"} Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.904612 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.820558047 podStartE2EDuration="3.904589195s" podCreationTimestamp="2026-01-21 17:50:35 +0000 UTC" firstStartedPulling="2026-01-21 17:50:36.395593072 +0000 UTC m=+1063.021883095" lastFinishedPulling="2026-01-21 17:50:37.47962422 +0000 UTC m=+1064.105914243" observedRunningTime="2026-01-21 17:50:38.876678672 +0000 UTC m=+1065.502968715" watchObservedRunningTime="2026-01-21 17:50:38.904589195 +0000 UTC m=+1065.530879218" Jan 21 17:50:38 crc kubenswrapper[4799]: I0121 17:50:38.955521 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:38 crc kubenswrapper[4799]: E0121 17:50:38.958919 4799 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 21 17:50:38 crc kubenswrapper[4799]: E0121 17:50:38.958951 4799 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 21 17:50:38 crc kubenswrapper[4799]: E0121 17:50:38.958999 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift podName:771ea47a-76eb-434d-ac1f-cf6048f08237 nodeName:}" failed. No retries permitted until 2026-01-21 17:50:39.958979499 +0000 UTC m=+1066.585269522 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift") pod "swift-storage-0" (UID: "771ea47a-76eb-434d-ac1f-cf6048f08237") : configmap "swift-ring-files" not found Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.269646 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.362602 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7krgl\" (UniqueName: \"kubernetes.io/projected/4b8250d4-2239-43f2-ba4e-2b18eda69202-kube-api-access-7krgl\") pod \"4b8250d4-2239-43f2-ba4e-2b18eda69202\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.362810 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-ovsdbserver-nb\") pod \"4b8250d4-2239-43f2-ba4e-2b18eda69202\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.362888 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-dns-svc\") pod \"4b8250d4-2239-43f2-ba4e-2b18eda69202\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.362977 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-ovsdbserver-sb\") pod \"4b8250d4-2239-43f2-ba4e-2b18eda69202\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.363094 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-config\") pod \"4b8250d4-2239-43f2-ba4e-2b18eda69202\" (UID: \"4b8250d4-2239-43f2-ba4e-2b18eda69202\") " Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.368490 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b8250d4-2239-43f2-ba4e-2b18eda69202-kube-api-access-7krgl" (OuterVolumeSpecName: "kube-api-access-7krgl") pod "4b8250d4-2239-43f2-ba4e-2b18eda69202" (UID: "4b8250d4-2239-43f2-ba4e-2b18eda69202"). 
InnerVolumeSpecName "kube-api-access-7krgl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.406023 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4b8250d4-2239-43f2-ba4e-2b18eda69202" (UID: "4b8250d4-2239-43f2-ba4e-2b18eda69202"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.465328 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.465361 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7krgl\" (UniqueName: \"kubernetes.io/projected/4b8250d4-2239-43f2-ba4e-2b18eda69202-kube-api-access-7krgl\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.501511 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4b8250d4-2239-43f2-ba4e-2b18eda69202" (UID: "4b8250d4-2239-43f2-ba4e-2b18eda69202"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.502733 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-config" (OuterVolumeSpecName: "config") pod "4b8250d4-2239-43f2-ba4e-2b18eda69202" (UID: "4b8250d4-2239-43f2-ba4e-2b18eda69202"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.511325 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4b8250d4-2239-43f2-ba4e-2b18eda69202" (UID: "4b8250d4-2239-43f2-ba4e-2b18eda69202"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.567729 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.567798 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.567811 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b8250d4-2239-43f2-ba4e-2b18eda69202-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.838069 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" event={"ID":"6df31e83-1f86-4266-be9f-7e8dbfd25922","Type":"ContainerStarted","Data":"a7dfc94a5d699e7957cc86850a5491be3a74a7e79229572171d019076121f11c"} Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.840390 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.840376 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f6d45dc65-7z99n" event={"ID":"4b8250d4-2239-43f2-ba4e-2b18eda69202","Type":"ContainerDied","Data":"56f99ed86ca1ec8c13b175431fa4964f5638c69f0aad20f47bca44f97d518739"} Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.840927 4799 scope.go:117] "RemoveContainer" containerID="6fa23a265b8db7a5e76627d2039f964cbba4ff4eb0e59af76a5c5cc4b19cf7c9" Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.842959 4799 generic.go:334] "Generic (PLEG): container finished" podID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerID="5f544d8b4e7c301812a7ecb1256ebbdbbadec5f876ac61b8d332b6d8f7b81167" exitCode=0 Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.842992 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c3dbd916-66ac-4f70-a011-68d4195c5c44","Type":"ContainerDied","Data":"5f544d8b4e7c301812a7ecb1256ebbdbbadec5f876ac61b8d332b6d8f7b81167"} Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.844642 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.877443 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" podStartSLOduration=2.877420566 podStartE2EDuration="2.877420566s" podCreationTimestamp="2026-01-21 17:50:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:50:39.861290804 +0000 UTC m=+1066.487580867" watchObservedRunningTime="2026-01-21 17:50:39.877420566 +0000 UTC m=+1066.503710599" Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.963903 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f6d45dc65-7z99n"] Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.973073 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f6d45dc65-7z99n"] Jan 21 17:50:39 crc kubenswrapper[4799]: I0121 17:50:39.976955 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:39 crc kubenswrapper[4799]: E0121 17:50:39.978244 4799 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 21 17:50:39 crc kubenswrapper[4799]: E0121 17:50:39.978270 4799 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 21 17:50:39 crc kubenswrapper[4799]: E0121 17:50:39.978348 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift podName:771ea47a-76eb-434d-ac1f-cf6048f08237 nodeName:}" failed. No retries permitted until 2026-01-21 17:50:41.978311614 +0000 UTC m=+1068.604601867 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift") pod "swift-storage-0" (UID: "771ea47a-76eb-434d-ac1f-cf6048f08237") : configmap "swift-ring-files" not found Jan 21 17:50:40 crc kubenswrapper[4799]: I0121 17:50:40.217234 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b8250d4-2239-43f2-ba4e-2b18eda69202" path="/var/lib/kubelet/pods/4b8250d4-2239-43f2-ba4e-2b18eda69202/volumes" Jan 21 17:50:40 crc kubenswrapper[4799]: I0121 17:50:40.854973 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.024314 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:42 crc kubenswrapper[4799]: E0121 17:50:42.024553 4799 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 21 17:50:42 crc kubenswrapper[4799]: E0121 17:50:42.024689 4799 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 21 17:50:42 crc kubenswrapper[4799]: E0121 17:50:42.024756 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift podName:771ea47a-76eb-434d-ac1f-cf6048f08237 nodeName:}" failed. No retries permitted until 2026-01-21 17:50:46.02473872 +0000 UTC m=+1072.651028743 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift") pod "swift-storage-0" (UID: "771ea47a-76eb-434d-ac1f-cf6048f08237") : configmap "swift-ring-files" not found Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.110362 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-72p7x"] Jan 21 17:50:42 crc kubenswrapper[4799]: E0121 17:50:42.111007 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b8250d4-2239-43f2-ba4e-2b18eda69202" containerName="init" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.111533 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b8250d4-2239-43f2-ba4e-2b18eda69202" containerName="init" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.111953 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b8250d4-2239-43f2-ba4e-2b18eda69202" containerName="init" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.113231 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.116400 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.116752 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.117210 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.138019 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-72p7x"] Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.239072 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-etc-swift\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.239202 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-dispersionconf\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.239262 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-swiftconf\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.239386 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-scripts\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.239437 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-ring-data-devices\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.239468 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-combined-ca-bundle\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.239510 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6s2xr\" (UniqueName: \"kubernetes.io/projected/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-kube-api-access-6s2xr\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.341192 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-dispersionconf\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.341325 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-swiftconf\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.341381 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-scripts\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.341405 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-ring-data-devices\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.341441 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-combined-ca-bundle\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.341469 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6s2xr\" (UniqueName: \"kubernetes.io/projected/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-kube-api-access-6s2xr\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.341587 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" 
(UniqueName: \"kubernetes.io/empty-dir/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-etc-swift\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.342260 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-etc-swift\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.342519 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-scripts\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.342540 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-ring-data-devices\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.347782 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-dispersionconf\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.348667 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-combined-ca-bundle\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.352161 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-swiftconf\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.362659 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6s2xr\" (UniqueName: \"kubernetes.io/projected/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-kube-api-access-6s2xr\") pod \"swift-ring-rebalance-72p7x\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.460869 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:50:42 crc kubenswrapper[4799]: I0121 17:50:42.912483 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-72p7x"] Jan 21 17:50:42 crc kubenswrapper[4799]: W0121 17:50:42.928383 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9b76963_d66a_43b7_9f1a_ef2a18ef6d02.slice/crio-2135408d978a51424cfa6a1a820a03cb15e63b85b808cf4f44556097da29c5ff WatchSource:0}: Error finding container 2135408d978a51424cfa6a1a820a03cb15e63b85b808cf4f44556097da29c5ff: Status 404 returned error can't find the container with id 2135408d978a51424cfa6a1a820a03cb15e63b85b808cf4f44556097da29c5ff Jan 21 17:50:43 crc kubenswrapper[4799]: I0121 17:50:43.004677 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Jan 21 17:50:43 crc kubenswrapper[4799]: I0121 17:50:43.004756 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Jan 21 17:50:43 crc kubenswrapper[4799]: I0121 17:50:43.196960 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Jan 21 17:50:43 crc kubenswrapper[4799]: I0121 17:50:43.896373 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-72p7x" event={"ID":"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02","Type":"ContainerStarted","Data":"2135408d978a51424cfa6a1a820a03cb15e63b85b808cf4f44556097da29c5ff"} Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.033570 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.222256 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5362-account-create-update-9xw4s"] Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.223604 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5362-account-create-update-9xw4s" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.226814 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.241806 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5362-account-create-update-9xw4s"] Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.286403 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39b6f6dd-e24e-4398-87d7-0fc790374a12-operator-scripts\") pod \"keystone-5362-account-create-update-9xw4s\" (UID: \"39b6f6dd-e24e-4398-87d7-0fc790374a12\") " pod="openstack/keystone-5362-account-create-update-9xw4s" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.286757 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fr59\" (UniqueName: \"kubernetes.io/projected/39b6f6dd-e24e-4398-87d7-0fc790374a12-kube-api-access-9fr59\") pod \"keystone-5362-account-create-update-9xw4s\" (UID: \"39b6f6dd-e24e-4398-87d7-0fc790374a12\") " pod="openstack/keystone-5362-account-create-update-9xw4s" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.292394 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-6xr5d"] Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.293989 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-6xr5d" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.302608 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-6xr5d"] Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.389566 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fr59\" (UniqueName: \"kubernetes.io/projected/39b6f6dd-e24e-4398-87d7-0fc790374a12-kube-api-access-9fr59\") pod \"keystone-5362-account-create-update-9xw4s\" (UID: \"39b6f6dd-e24e-4398-87d7-0fc790374a12\") " pod="openstack/keystone-5362-account-create-update-9xw4s" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.389639 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d517139c-ff8f-4320-8901-06ff9955241c-operator-scripts\") pod \"keystone-db-create-6xr5d\" (UID: \"d517139c-ff8f-4320-8901-06ff9955241c\") " pod="openstack/keystone-db-create-6xr5d" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.389720 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqwhm\" (UniqueName: \"kubernetes.io/projected/d517139c-ff8f-4320-8901-06ff9955241c-kube-api-access-mqwhm\") pod \"keystone-db-create-6xr5d\" (UID: \"d517139c-ff8f-4320-8901-06ff9955241c\") " pod="openstack/keystone-db-create-6xr5d" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.389985 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39b6f6dd-e24e-4398-87d7-0fc790374a12-operator-scripts\") pod \"keystone-5362-account-create-update-9xw4s\" (UID: \"39b6f6dd-e24e-4398-87d7-0fc790374a12\") " pod="openstack/keystone-5362-account-create-update-9xw4s" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.391008 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39b6f6dd-e24e-4398-87d7-0fc790374a12-operator-scripts\") pod \"keystone-5362-account-create-update-9xw4s\" (UID: \"39b6f6dd-e24e-4398-87d7-0fc790374a12\") " pod="openstack/keystone-5362-account-create-update-9xw4s" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.421958 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fr59\" (UniqueName: \"kubernetes.io/projected/39b6f6dd-e24e-4398-87d7-0fc790374a12-kube-api-access-9fr59\") pod \"keystone-5362-account-create-update-9xw4s\" (UID: \"39b6f6dd-e24e-4398-87d7-0fc790374a12\") " pod="openstack/keystone-5362-account-create-update-9xw4s" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.480313 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-rg5tc"] Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.481770 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-rg5tc" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.491554 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-rg5tc"] Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.492179 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d517139c-ff8f-4320-8901-06ff9955241c-operator-scripts\") pod \"keystone-db-create-6xr5d\" (UID: \"d517139c-ff8f-4320-8901-06ff9955241c\") " pod="openstack/keystone-db-create-6xr5d" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.492322 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqwhm\" (UniqueName: \"kubernetes.io/projected/d517139c-ff8f-4320-8901-06ff9955241c-kube-api-access-mqwhm\") pod \"keystone-db-create-6xr5d\" (UID: \"d517139c-ff8f-4320-8901-06ff9955241c\") " pod="openstack/keystone-db-create-6xr5d" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.493945 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d517139c-ff8f-4320-8901-06ff9955241c-operator-scripts\") pod \"keystone-db-create-6xr5d\" (UID: \"d517139c-ff8f-4320-8901-06ff9955241c\") " pod="openstack/keystone-db-create-6xr5d" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.539892 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqwhm\" (UniqueName: \"kubernetes.io/projected/d517139c-ff8f-4320-8901-06ff9955241c-kube-api-access-mqwhm\") pod \"keystone-db-create-6xr5d\" (UID: \"d517139c-ff8f-4320-8901-06ff9955241c\") " pod="openstack/keystone-db-create-6xr5d" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.550666 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5362-account-create-update-9xw4s" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.598857 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vhlv\" (UniqueName: \"kubernetes.io/projected/693423df-cd0b-4d1e-a58d-ec5f062db23d-kube-api-access-6vhlv\") pod \"placement-db-create-rg5tc\" (UID: \"693423df-cd0b-4d1e-a58d-ec5f062db23d\") " pod="openstack/placement-db-create-rg5tc" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.600351 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/693423df-cd0b-4d1e-a58d-ec5f062db23d-operator-scripts\") pod \"placement-db-create-rg5tc\" (UID: \"693423df-cd0b-4d1e-a58d-ec5f062db23d\") " pod="openstack/placement-db-create-rg5tc" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.612096 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-6xr5d" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.633637 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-83af-account-create-update-tz6l9"] Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.639560 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-83af-account-create-update-tz6l9" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.648415 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.658241 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.660531 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.683184 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-83af-account-create-update-tz6l9"] Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.732528 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/082ee676-0c0e-48fc-a537-aac7e95dd4ae-operator-scripts\") pod \"placement-83af-account-create-update-tz6l9\" (UID: \"082ee676-0c0e-48fc-a537-aac7e95dd4ae\") " pod="openstack/placement-83af-account-create-update-tz6l9" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.732848 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vhlv\" (UniqueName: \"kubernetes.io/projected/693423df-cd0b-4d1e-a58d-ec5f062db23d-kube-api-access-6vhlv\") pod \"placement-db-create-rg5tc\" (UID: \"693423df-cd0b-4d1e-a58d-ec5f062db23d\") " pod="openstack/placement-db-create-rg5tc" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.732896 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h585g\" (UniqueName: \"kubernetes.io/projected/082ee676-0c0e-48fc-a537-aac7e95dd4ae-kube-api-access-h585g\") pod \"placement-83af-account-create-update-tz6l9\" (UID: \"082ee676-0c0e-48fc-a537-aac7e95dd4ae\") " pod="openstack/placement-83af-account-create-update-tz6l9" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.732993 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/693423df-cd0b-4d1e-a58d-ec5f062db23d-operator-scripts\") pod \"placement-db-create-rg5tc\" (UID: \"693423df-cd0b-4d1e-a58d-ec5f062db23d\") " pod="openstack/placement-db-create-rg5tc" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.733901 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/693423df-cd0b-4d1e-a58d-ec5f062db23d-operator-scripts\") pod \"placement-db-create-rg5tc\" (UID: \"693423df-cd0b-4d1e-a58d-ec5f062db23d\") " pod="openstack/placement-db-create-rg5tc" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.781084 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vhlv\" (UniqueName: \"kubernetes.io/projected/693423df-cd0b-4d1e-a58d-ec5f062db23d-kube-api-access-6vhlv\") pod \"placement-db-create-rg5tc\" (UID: \"693423df-cd0b-4d1e-a58d-ec5f062db23d\") " pod="openstack/placement-db-create-rg5tc" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.814629 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-rg5tc" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.834689 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/082ee676-0c0e-48fc-a537-aac7e95dd4ae-operator-scripts\") pod \"placement-83af-account-create-update-tz6l9\" (UID: \"082ee676-0c0e-48fc-a537-aac7e95dd4ae\") " pod="openstack/placement-83af-account-create-update-tz6l9" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.834785 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h585g\" (UniqueName: \"kubernetes.io/projected/082ee676-0c0e-48fc-a537-aac7e95dd4ae-kube-api-access-h585g\") pod \"placement-83af-account-create-update-tz6l9\" (UID: \"082ee676-0c0e-48fc-a537-aac7e95dd4ae\") " pod="openstack/placement-83af-account-create-update-tz6l9" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.835980 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/082ee676-0c0e-48fc-a537-aac7e95dd4ae-operator-scripts\") pod \"placement-83af-account-create-update-tz6l9\" (UID: \"082ee676-0c0e-48fc-a537-aac7e95dd4ae\") " pod="openstack/placement-83af-account-create-update-tz6l9" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.852456 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h585g\" (UniqueName: \"kubernetes.io/projected/082ee676-0c0e-48fc-a537-aac7e95dd4ae-kube-api-access-h585g\") pod \"placement-83af-account-create-update-tz6l9\" (UID: \"082ee676-0c0e-48fc-a537-aac7e95dd4ae\") " pod="openstack/placement-83af-account-create-update-tz6l9" Jan 21 17:50:44 crc kubenswrapper[4799]: I0121 17:50:44.928722 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Jan 21 17:50:45 crc kubenswrapper[4799]: I0121 17:50:45.053636 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-83af-account-create-update-tz6l9" Jan 21 17:50:45 crc kubenswrapper[4799]: I0121 17:50:45.596291 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:46 crc kubenswrapper[4799]: I0121 17:50:46.027942 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Jan 21 17:50:46 crc kubenswrapper[4799]: E0121 17:50:46.028318 4799 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 21 17:50:46 crc kubenswrapper[4799]: E0121 17:50:46.028333 4799 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 21 17:50:46 crc kubenswrapper[4799]: E0121 17:50:46.028379 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift podName:771ea47a-76eb-434d-ac1f-cf6048f08237 nodeName:}" failed. No retries permitted until 2026-01-21 17:50:54.028361652 +0000 UTC m=+1080.654651675 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift") pod "swift-storage-0" (UID: "771ea47a-76eb-434d-ac1f-cf6048f08237") : configmap "swift-ring-files" not found Jan 21 17:50:46 crc kubenswrapper[4799]: I0121 17:50:46.028832 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:46 crc kubenswrapper[4799]: I0121 17:50:46.840383 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-create-25rbc"] Jan 21 17:50:46 crc kubenswrapper[4799]: I0121 17:50:46.841652 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-create-25rbc" Jan 21 17:50:46 crc kubenswrapper[4799]: I0121 17:50:46.846716 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc6f84a8-5008-4534-b894-1caa2f8585da-operator-scripts\") pod \"watcher-db-create-25rbc\" (UID: \"dc6f84a8-5008-4534-b894-1caa2f8585da\") " pod="openstack/watcher-db-create-25rbc" Jan 21 17:50:46 crc kubenswrapper[4799]: I0121 17:50:46.846884 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhks6\" (UniqueName: \"kubernetes.io/projected/dc6f84a8-5008-4534-b894-1caa2f8585da-kube-api-access-qhks6\") pod \"watcher-db-create-25rbc\" (UID: \"dc6f84a8-5008-4534-b894-1caa2f8585da\") " pod="openstack/watcher-db-create-25rbc" Jan 21 17:50:46 crc kubenswrapper[4799]: I0121 17:50:46.852862 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-25rbc"] Jan 21 17:50:46 crc kubenswrapper[4799]: I0121 17:50:46.904978 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 21 17:50:46 crc kubenswrapper[4799]: I0121 17:50:46.948421 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhks6\" (UniqueName: \"kubernetes.io/projected/dc6f84a8-5008-4534-b894-1caa2f8585da-kube-api-access-qhks6\") pod \"watcher-db-create-25rbc\" (UID: \"dc6f84a8-5008-4534-b894-1caa2f8585da\") " pod="openstack/watcher-db-create-25rbc" Jan 21 17:50:46 crc kubenswrapper[4799]: I0121 17:50:46.948565 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc6f84a8-5008-4534-b894-1caa2f8585da-operator-scripts\") pod \"watcher-db-create-25rbc\" (UID: \"dc6f84a8-5008-4534-b894-1caa2f8585da\") " pod="openstack/watcher-db-create-25rbc" Jan 21 17:50:46 crc kubenswrapper[4799]: I0121 17:50:46.949679 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc6f84a8-5008-4534-b894-1caa2f8585da-operator-scripts\") pod \"watcher-db-create-25rbc\" (UID: \"dc6f84a8-5008-4534-b894-1caa2f8585da\") " pod="openstack/watcher-db-create-25rbc" Jan 21 17:50:46 crc kubenswrapper[4799]: I0121 17:50:46.977751 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhks6\" (UniqueName: \"kubernetes.io/projected/dc6f84a8-5008-4534-b894-1caa2f8585da-kube-api-access-qhks6\") pod \"watcher-db-create-25rbc\" (UID: \"dc6f84a8-5008-4534-b894-1caa2f8585da\") " pod="openstack/watcher-db-create-25rbc" Jan 21 17:50:46 crc kubenswrapper[4799]: I0121 17:50:46.990190 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-4a30-account-create-update-wqr8v"] Jan 21 17:50:46 crc kubenswrapper[4799]: I0121 17:50:46.991505 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-4a30-account-create-update-wqr8v"] Jan 21 17:50:46 crc kubenswrapper[4799]: I0121 17:50:46.991596 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-4a30-account-create-update-wqr8v" Jan 21 17:50:47 crc kubenswrapper[4799]: I0121 17:50:47.001490 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-db-secret" Jan 21 17:50:47 crc kubenswrapper[4799]: I0121 17:50:47.153766 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/747c974f-6219-4bb3-a6d0-e657bd201d5d-operator-scripts\") pod \"watcher-4a30-account-create-update-wqr8v\" (UID: \"747c974f-6219-4bb3-a6d0-e657bd201d5d\") " pod="openstack/watcher-4a30-account-create-update-wqr8v" Jan 21 17:50:47 crc kubenswrapper[4799]: I0121 17:50:47.153877 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbdc6\" (UniqueName: \"kubernetes.io/projected/747c974f-6219-4bb3-a6d0-e657bd201d5d-kube-api-access-sbdc6\") pod \"watcher-4a30-account-create-update-wqr8v\" (UID: \"747c974f-6219-4bb3-a6d0-e657bd201d5d\") " pod="openstack/watcher-4a30-account-create-update-wqr8v" Jan 21 17:50:47 crc kubenswrapper[4799]: I0121 17:50:47.208139 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-25rbc" Jan 21 17:50:47 crc kubenswrapper[4799]: I0121 17:50:47.256063 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/747c974f-6219-4bb3-a6d0-e657bd201d5d-operator-scripts\") pod \"watcher-4a30-account-create-update-wqr8v\" (UID: \"747c974f-6219-4bb3-a6d0-e657bd201d5d\") " pod="openstack/watcher-4a30-account-create-update-wqr8v" Jan 21 17:50:47 crc kubenswrapper[4799]: I0121 17:50:47.256219 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbdc6\" (UniqueName: \"kubernetes.io/projected/747c974f-6219-4bb3-a6d0-e657bd201d5d-kube-api-access-sbdc6\") pod \"watcher-4a30-account-create-update-wqr8v\" (UID: \"747c974f-6219-4bb3-a6d0-e657bd201d5d\") " pod="openstack/watcher-4a30-account-create-update-wqr8v" Jan 21 17:50:47 crc kubenswrapper[4799]: I0121 17:50:47.257732 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/747c974f-6219-4bb3-a6d0-e657bd201d5d-operator-scripts\") pod \"watcher-4a30-account-create-update-wqr8v\" (UID: \"747c974f-6219-4bb3-a6d0-e657bd201d5d\") " pod="openstack/watcher-4a30-account-create-update-wqr8v" Jan 21 17:50:47 crc kubenswrapper[4799]: I0121 17:50:47.274492 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbdc6\" (UniqueName: \"kubernetes.io/projected/747c974f-6219-4bb3-a6d0-e657bd201d5d-kube-api-access-sbdc6\") pod \"watcher-4a30-account-create-update-wqr8v\" (UID: \"747c974f-6219-4bb3-a6d0-e657bd201d5d\") " pod="openstack/watcher-4a30-account-create-update-wqr8v" Jan 21 17:50:47 crc kubenswrapper[4799]: I0121 17:50:47.339978 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-4a30-account-create-update-wqr8v" Jan 21 17:50:47 crc kubenswrapper[4799]: I0121 17:50:47.435461 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:50:47 crc kubenswrapper[4799]: I0121 17:50:47.494157 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78bf94944f-splp6"] Jan 21 17:50:47 crc kubenswrapper[4799]: I0121 17:50:47.494617 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-78bf94944f-splp6" podUID="6266e3d5-e453-43de-9353-84c2d23c23ea" containerName="dnsmasq-dns" containerID="cri-o://2482768cdb5c44f0fc160dd31ab4420eb5a058bfef55b21537971c159f83f351" gracePeriod=10 Jan 21 17:50:49 crc kubenswrapper[4799]: I0121 17:50:49.940779 4799 generic.go:334] "Generic (PLEG): container finished" podID="6266e3d5-e453-43de-9353-84c2d23c23ea" containerID="2482768cdb5c44f0fc160dd31ab4420eb5a058bfef55b21537971c159f83f351" exitCode=0 Jan 21 17:50:49 crc kubenswrapper[4799]: I0121 17:50:49.940852 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78bf94944f-splp6" event={"ID":"6266e3d5-e453-43de-9353-84c2d23c23ea","Type":"ContainerDied","Data":"2482768cdb5c44f0fc160dd31ab4420eb5a058bfef55b21537971c159f83f351"} Jan 21 17:50:50 crc kubenswrapper[4799]: I0121 17:50:50.596162 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-78bf94944f-splp6" podUID="6266e3d5-e453-43de-9353-84c2d23c23ea" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.118:5353: connect: connection refused" Jan 21 17:50:50 crc kubenswrapper[4799]: I0121 17:50:50.727758 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Jan 21 17:50:51 crc kubenswrapper[4799]: I0121 17:50:51.636081 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-mxsmt"] Jan 21 17:50:51 crc kubenswrapper[4799]: I0121 17:50:51.638021 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-mxsmt" Jan 21 17:50:51 crc kubenswrapper[4799]: I0121 17:50:51.641017 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 21 17:50:51 crc kubenswrapper[4799]: I0121 17:50:51.672007 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-mxsmt"] Jan 21 17:50:51 crc kubenswrapper[4799]: I0121 17:50:51.749644 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k52xc\" (UniqueName: \"kubernetes.io/projected/eea46be9-f407-41a1-a0c0-e80caa761e8d-kube-api-access-k52xc\") pod \"root-account-create-update-mxsmt\" (UID: \"eea46be9-f407-41a1-a0c0-e80caa761e8d\") " pod="openstack/root-account-create-update-mxsmt" Jan 21 17:50:51 crc kubenswrapper[4799]: I0121 17:50:51.749884 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eea46be9-f407-41a1-a0c0-e80caa761e8d-operator-scripts\") pod \"root-account-create-update-mxsmt\" (UID: \"eea46be9-f407-41a1-a0c0-e80caa761e8d\") " pod="openstack/root-account-create-update-mxsmt" Jan 21 17:50:51 crc kubenswrapper[4799]: I0121 17:50:51.851293 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k52xc\" (UniqueName: \"kubernetes.io/projected/eea46be9-f407-41a1-a0c0-e80caa761e8d-kube-api-access-k52xc\") pod \"root-account-create-update-mxsmt\" (UID: \"eea46be9-f407-41a1-a0c0-e80caa761e8d\") " pod="openstack/root-account-create-update-mxsmt" Jan 21 17:50:51 crc kubenswrapper[4799]: I0121 17:50:51.851446 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eea46be9-f407-41a1-a0c0-e80caa761e8d-operator-scripts\") pod \"root-account-create-update-mxsmt\" (UID: \"eea46be9-f407-41a1-a0c0-e80caa761e8d\") " pod="openstack/root-account-create-update-mxsmt" Jan 21 17:50:51 crc kubenswrapper[4799]: I0121 17:50:51.852589 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eea46be9-f407-41a1-a0c0-e80caa761e8d-operator-scripts\") pod \"root-account-create-update-mxsmt\" (UID: \"eea46be9-f407-41a1-a0c0-e80caa761e8d\") " pod="openstack/root-account-create-update-mxsmt" Jan 21 17:50:51 crc kubenswrapper[4799]: I0121 17:50:51.872238 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k52xc\" (UniqueName: \"kubernetes.io/projected/eea46be9-f407-41a1-a0c0-e80caa761e8d-kube-api-access-k52xc\") pod \"root-account-create-update-mxsmt\" (UID: \"eea46be9-f407-41a1-a0c0-e80caa761e8d\") " pod="openstack/root-account-create-update-mxsmt" Jan 21 17:50:51 crc kubenswrapper[4799]: I0121 17:50:51.963148 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-mxsmt" Jan 21 17:50:54 crc kubenswrapper[4799]: E0121 17:50:54.015268 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:1b555e21bba7c609111ace4380382a696d9aceeb6e9816bf9023b8f689b6c741" Jan 21 17:50:54 crc kubenswrapper[4799]: E0121 17:50:54.015676 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus,Image:registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:1b555e21bba7c609111ace4380382a696d9aceeb6e9816bf9023b8f689b6c741,Command:[],Args:[--config.file=/etc/prometheus/config_out/prometheus.env.yaml --web.enable-lifecycle --web.enable-remote-write-receiver --web.route-prefix=/ --storage.tsdb.retention.time=24h --storage.tsdb.path=/prometheus --web.config.file=/etc/prometheus/web_config/web-config.yaml],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:web,HostPort:0,ContainerPort:9090,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-out,ReadOnly:true,MountPath:/etc/prometheus/config_out,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tls-assets,ReadOnly:true,MountPath:/etc/prometheus/certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:prometheus-metric-storage-db,ReadOnly:false,MountPath:/prometheus,SubPath:prometheus-db,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:prometheus-metric-storage-rulefiles-0,ReadOnly:true,MountPath:/etc/prometheus/rules/prometheus-metric-storage-rulefiles-0,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:prometheus-metric-storage-rulefiles-1,ReadOnly:true,MountPath:/etc/prometheus/rules/prometheus-metric-storage-rulefiles-1,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:prometheus-metric-storage-rulefiles-2,ReadOnly:true,MountPath:/etc/prometheus/rules/prometheus-metric-storage-rulefiles-2,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:web-config,ReadOnly:true,MountPath:/etc/prometheus/web_config/web-config.yaml,SubPath:web-config.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bxx4k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/-/healthy,Port:{1 0 web},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:3,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:6,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/-/ready,Port:{1 0 
web},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:3,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/-/ready,Port:{1 0 web},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:3,PeriodSeconds:15,SuccessThreshold:1,FailureThreshold:60,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod prometheus-metric-storage-0_openstack(c3dbd916-66ac-4f70-a011-68d4195c5c44): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 21 17:50:54 crc kubenswrapper[4799]: I0121 17:50:54.104376 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:50:54 crc kubenswrapper[4799]: E0121 17:50:54.104629 4799 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 21 17:50:54 crc kubenswrapper[4799]: E0121 17:50:54.104648 4799 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 21 17:50:54 crc kubenswrapper[4799]: E0121 17:50:54.104704 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift podName:771ea47a-76eb-434d-ac1f-cf6048f08237 nodeName:}" failed. No retries permitted until 2026-01-21 17:51:10.104685932 +0000 UTC m=+1096.730975955 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift") pod "swift-storage-0" (UID: "771ea47a-76eb-434d-ac1f-cf6048f08237") : configmap "swift-ring-files" not found Jan 21 17:50:55 crc kubenswrapper[4799]: E0121 17:50:55.354259 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-swift-proxy-server:watcher_latest" Jan 21 17:50:55 crc kubenswrapper[4799]: E0121 17:50:55.354570 4799 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-swift-proxy-server:watcher_latest" Jan 21 17:50:55 crc kubenswrapper[4799]: E0121 17:50:55.354685 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:swift-ring-rebalance,Image:38.102.83.30:5001/podified-master-centos10/openstack-swift-proxy-server:watcher_latest,Command:[/usr/local/bin/swift-ring-tool all],Args:[],WorkingDir:/etc/swift,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CM_NAME,Value:swift-ring-files,ValueFrom:nil,},EnvVar{Name:NAMESPACE,Value:openstack,ValueFrom:nil,},EnvVar{Name:OWNER_APIVERSION,Value:swift.openstack.org/v1beta1,ValueFrom:nil,},EnvVar{Name:OWNER_KIND,Value:SwiftRing,ValueFrom:nil,},EnvVar{Name:OWNER_NAME,Value:swift-ring,ValueFrom:nil,},EnvVar{Name:OWNER_UID,Value:89ec02f5-93f5-41c2-9d39-82f9b1452dd5,ValueFrom:nil,},EnvVar{Name:SWIFT_MIN_PART_HOURS,Value:1,ValueFrom:nil,},EnvVar{Name:SWIFT_PART_POWER,Value:10,ValueFrom:nil,},EnvVar{Name:SWIFT_REPLICAS,Value:1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/swift-ring-tool,SubPath:swift-ring-tool,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:swiftconf,ReadOnly:true,MountPath:/etc/swift/swift.conf,SubPath:swift.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:etc-swift,ReadOnly:false,MountPath:/etc/swift,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ring-data-devices,ReadOnly:true,MountPath:/var/lib/config-data/ring-devices,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dispersionconf,ReadOnly:true,MountPath:/etc/swift/dispersion.conf,SubPath:dispersion.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6s2xr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42445,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{
},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-ring-rebalance-72p7x_openstack(a9b76963-d66a-43b7-9f1a-ef2a18ef6d02): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 17:50:55 crc kubenswrapper[4799]: E0121 17:50:55.356630 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"swift-ring-rebalance\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/swift-ring-rebalance-72p7x" podUID="a9b76963-d66a-43b7-9f1a-ef2a18ef6d02" Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.800742 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.860195 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sqvhs\" (UniqueName: \"kubernetes.io/projected/6266e3d5-e453-43de-9353-84c2d23c23ea-kube-api-access-sqvhs\") pod \"6266e3d5-e453-43de-9353-84c2d23c23ea\" (UID: \"6266e3d5-e453-43de-9353-84c2d23c23ea\") " Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.860478 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-config\") pod \"6266e3d5-e453-43de-9353-84c2d23c23ea\" (UID: \"6266e3d5-e453-43de-9353-84c2d23c23ea\") " Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.860513 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-dns-svc\") pod \"6266e3d5-e453-43de-9353-84c2d23c23ea\" (UID: \"6266e3d5-e453-43de-9353-84c2d23c23ea\") " Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.860528 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-ovsdbserver-sb\") pod \"6266e3d5-e453-43de-9353-84c2d23c23ea\" (UID: \"6266e3d5-e453-43de-9353-84c2d23c23ea\") " Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.867254 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6266e3d5-e453-43de-9353-84c2d23c23ea-kube-api-access-sqvhs" (OuterVolumeSpecName: "kube-api-access-sqvhs") pod "6266e3d5-e453-43de-9353-84c2d23c23ea" (UID: "6266e3d5-e453-43de-9353-84c2d23c23ea"). InnerVolumeSpecName "kube-api-access-sqvhs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.897501 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6266e3d5-e453-43de-9353-84c2d23c23ea" (UID: "6266e3d5-e453-43de-9353-84c2d23c23ea"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.898429 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6266e3d5-e453-43de-9353-84c2d23c23ea" (UID: "6266e3d5-e453-43de-9353-84c2d23c23ea"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.899266 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-config" (OuterVolumeSpecName: "config") pod "6266e3d5-e453-43de-9353-84c2d23c23ea" (UID: "6266e3d5-e453-43de-9353-84c2d23c23ea"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.963093 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.963834 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.963892 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sqvhs\" (UniqueName: \"kubernetes.io/projected/6266e3d5-e453-43de-9353-84c2d23c23ea-kube-api-access-sqvhs\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.963911 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6266e3d5-e453-43de-9353-84c2d23c23ea-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.970860 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.970940 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.971010 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.974240 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ae5330e16575441a8b84498a2fefd6345766a3ffb339a011bad17c508c054c31"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 17:50:55 crc kubenswrapper[4799]: I0121 17:50:55.974376 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://ae5330e16575441a8b84498a2fefd6345766a3ffb339a011bad17c508c054c31" gracePeriod=600 Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.003801 4799 generic.go:334] "Generic (PLEG): container finished" podID="03a5694f-1e8b-490e-be8f-dce31bdd83c3" 
containerID="d18ca9012873ef22c48f7bd29f7fe503167022792fd44328ccf69dd39dbcf871" exitCode=0 Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.003909 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"03a5694f-1e8b-490e-be8f-dce31bdd83c3","Type":"ContainerDied","Data":"d18ca9012873ef22c48f7bd29f7fe503167022792fd44328ccf69dd39dbcf871"} Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.019789 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78bf94944f-splp6" Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.020449 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78bf94944f-splp6" event={"ID":"6266e3d5-e453-43de-9353-84c2d23c23ea","Type":"ContainerDied","Data":"c3bf44a27069777180a70d5427e689950b15c91d6c116d52dff42fba7ce1fa6f"} Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.020511 4799 scope.go:117] "RemoveContainer" containerID="2482768cdb5c44f0fc160dd31ab4420eb5a058bfef55b21537971c159f83f351" Jan 21 17:50:56 crc kubenswrapper[4799]: E0121 17:50:56.021339 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"swift-ring-rebalance\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.30:5001/podified-master-centos10/openstack-swift-proxy-server:watcher_latest\\\"\"" pod="openstack/swift-ring-rebalance-72p7x" podUID="a9b76963-d66a-43b7-9f1a-ef2a18ef6d02" Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.046972 4799 scope.go:117] "RemoveContainer" containerID="95cf33382a1d04bf26f3e8044d30a9526342e71d201301b256293c34929cd5ae" Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.113681 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.115816 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.122393 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-db-secret" Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.126441 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-83af-account-create-update-tz6l9"] Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.135590 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-4a30-account-create-update-wqr8v"] Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.145558 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5362-account-create-update-9xw4s"] Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.152622 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-6xr5d"] Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.173048 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78bf94944f-splp6"] Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.179727 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78bf94944f-splp6"] Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.223720 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6266e3d5-e453-43de-9353-84c2d23c23ea" path="/var/lib/kubelet/pods/6266e3d5-e453-43de-9353-84c2d23c23ea/volumes" Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.273830 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/placement-db-create-rg5tc"] Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.280297 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-25rbc"] Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.286623 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-mxsmt"] Jan 21 17:50:56 crc kubenswrapper[4799]: I0121 17:50:56.393856 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 21 17:50:57 crc kubenswrapper[4799]: I0121 17:50:57.031759 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-mxsmt" event={"ID":"eea46be9-f407-41a1-a0c0-e80caa761e8d","Type":"ContainerStarted","Data":"60d07fc4f283d0413eb453b0c4fc132eb0899fcb028cc169f28004166f3009a8"} Jan 21 17:50:57 crc kubenswrapper[4799]: I0121 17:50:57.033243 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-rg5tc" event={"ID":"693423df-cd0b-4d1e-a58d-ec5f062db23d","Type":"ContainerStarted","Data":"cc1b631f345ac346d69598bf7b4a4be2d76bf72b77f9824529334f52f0f80fcf"} Jan 21 17:50:57 crc kubenswrapper[4799]: I0121 17:50:57.034779 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5362-account-create-update-9xw4s" event={"ID":"39b6f6dd-e24e-4398-87d7-0fc790374a12","Type":"ContainerStarted","Data":"c468ce2f0bed80250a3d6069c386f5081dc869cc064504b993f432eaf4ad46cd"} Jan 21 17:50:57 crc kubenswrapper[4799]: I0121 17:50:57.037176 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-83af-account-create-update-tz6l9" event={"ID":"082ee676-0c0e-48fc-a537-aac7e95dd4ae","Type":"ContainerStarted","Data":"19a2f5825e725703dd7c87b967dd253c4c2cde5c06fe0f47ae4ab963988042db"} Jan 21 17:50:57 crc kubenswrapper[4799]: I0121 17:50:57.038482 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-25rbc" event={"ID":"dc6f84a8-5008-4534-b894-1caa2f8585da","Type":"ContainerStarted","Data":"e63f8826075f582459815d61b9c56ef93e7d9d2feb0e1f6564540ab3e5b0d4d5"} Jan 21 17:50:57 crc kubenswrapper[4799]: I0121 17:50:57.040048 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-4a30-account-create-update-wqr8v" event={"ID":"747c974f-6219-4bb3-a6d0-e657bd201d5d","Type":"ContainerStarted","Data":"037da1ec757c6c3a5621741c7ccc8d58a7656c5e926475137dd4a5108b99a1a0"} Jan 21 17:50:57 crc kubenswrapper[4799]: I0121 17:50:57.040086 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-4a30-account-create-update-wqr8v" event={"ID":"747c974f-6219-4bb3-a6d0-e657bd201d5d","Type":"ContainerStarted","Data":"86076e65d6ed55456f6eac79ac72b8dbd8bf806aff5adddc2fd712b8f7415f2f"} Jan 21 17:50:57 crc kubenswrapper[4799]: I0121 17:50:57.042539 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="ae5330e16575441a8b84498a2fefd6345766a3ffb339a011bad17c508c054c31" exitCode=0 Jan 21 17:50:57 crc kubenswrapper[4799]: I0121 17:50:57.042631 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"ae5330e16575441a8b84498a2fefd6345766a3ffb339a011bad17c508c054c31"} Jan 21 17:50:57 crc kubenswrapper[4799]: I0121 17:50:57.042736 4799 scope.go:117] "RemoveContainer" 
containerID="eed6e35e0dd567b7136adb6f803c960c31a5e8beac68fc922967bfc8623a01c5" Jan 21 17:50:57 crc kubenswrapper[4799]: I0121 17:50:57.045389 4799 generic.go:334] "Generic (PLEG): container finished" podID="d517139c-ff8f-4320-8901-06ff9955241c" containerID="afa598ced7a45ee637cd2a3f9a30e04109a5c2481da7c44df6e5f52ef667d3c6" exitCode=0 Jan 21 17:50:57 crc kubenswrapper[4799]: I0121 17:50:57.045425 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-6xr5d" event={"ID":"d517139c-ff8f-4320-8901-06ff9955241c","Type":"ContainerDied","Data":"afa598ced7a45ee637cd2a3f9a30e04109a5c2481da7c44df6e5f52ef667d3c6"} Jan 21 17:50:57 crc kubenswrapper[4799]: I0121 17:50:57.045445 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-6xr5d" event={"ID":"d517139c-ff8f-4320-8901-06ff9955241c","Type":"ContainerStarted","Data":"9fc6032b32c996b7fe381109240747a82a4eb406da80c177cee2d4600e4d3bf5"} Jan 21 17:50:57 crc kubenswrapper[4799]: I0121 17:50:57.064576 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-4a30-account-create-update-wqr8v" podStartSLOduration=11.064556305 podStartE2EDuration="11.064556305s" podCreationTimestamp="2026-01-21 17:50:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:50:57.057804565 +0000 UTC m=+1083.684094588" watchObservedRunningTime="2026-01-21 17:50:57.064556305 +0000 UTC m=+1083.690846328" Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.054921 4799 generic.go:334] "Generic (PLEG): container finished" podID="39b6f6dd-e24e-4398-87d7-0fc790374a12" containerID="995cae26e739d9a456ecb2d6cf1a4bcb754bfddde480558e3d28f917fe5b4814" exitCode=0 Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.054975 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5362-account-create-update-9xw4s" event={"ID":"39b6f6dd-e24e-4398-87d7-0fc790374a12","Type":"ContainerDied","Data":"995cae26e739d9a456ecb2d6cf1a4bcb754bfddde480558e3d28f917fe5b4814"} Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.057228 4799 generic.go:334] "Generic (PLEG): container finished" podID="63677f61-4283-417a-bcf7-303840452589" containerID="3681834a9f785cada47be88dfb5ed1ef26743bce72da0c4700f98cfe1e1f5a32" exitCode=0 Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.057298 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"63677f61-4283-417a-bcf7-303840452589","Type":"ContainerDied","Data":"3681834a9f785cada47be88dfb5ed1ef26743bce72da0c4700f98cfe1e1f5a32"} Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.059232 4799 generic.go:334] "Generic (PLEG): container finished" podID="082ee676-0c0e-48fc-a537-aac7e95dd4ae" containerID="56cc341f636759eb8be73d924f3983d29ea8ee7c208aca3c96869ee1168415a0" exitCode=0 Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.059351 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-83af-account-create-update-tz6l9" event={"ID":"082ee676-0c0e-48fc-a537-aac7e95dd4ae","Type":"ContainerDied","Data":"56cc341f636759eb8be73d924f3983d29ea8ee7c208aca3c96869ee1168415a0"} Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.061816 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"03a5694f-1e8b-490e-be8f-dce31bdd83c3","Type":"ContainerStarted","Data":"e19b9f0e038c9ded9ea4b11681266954f4ea8cb749b3b051e6c9dbd2bb1f658d"} Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.062100 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.063438 4799 generic.go:334] "Generic (PLEG): container finished" podID="747c974f-6219-4bb3-a6d0-e657bd201d5d" containerID="037da1ec757c6c3a5621741c7ccc8d58a7656c5e926475137dd4a5108b99a1a0" exitCode=0 Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.063495 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-4a30-account-create-update-wqr8v" event={"ID":"747c974f-6219-4bb3-a6d0-e657bd201d5d","Type":"ContainerDied","Data":"037da1ec757c6c3a5621741c7ccc8d58a7656c5e926475137dd4a5108b99a1a0"} Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.227532 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=58.134880854 podStartE2EDuration="1m9.227504936s" podCreationTimestamp="2026-01-21 17:49:49 +0000 UTC" firstStartedPulling="2026-01-21 17:50:11.0010106 +0000 UTC m=+1037.627300633" lastFinishedPulling="2026-01-21 17:50:22.093634692 +0000 UTC m=+1048.719924715" observedRunningTime="2026-01-21 17:50:58.216184099 +0000 UTC m=+1084.842474132" watchObservedRunningTime="2026-01-21 17:50:58.227504936 +0000 UTC m=+1084.853794959" Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.492309 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-6xr5d" Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.585858 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqwhm\" (UniqueName: \"kubernetes.io/projected/d517139c-ff8f-4320-8901-06ff9955241c-kube-api-access-mqwhm\") pod \"d517139c-ff8f-4320-8901-06ff9955241c\" (UID: \"d517139c-ff8f-4320-8901-06ff9955241c\") " Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.585951 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d517139c-ff8f-4320-8901-06ff9955241c-operator-scripts\") pod \"d517139c-ff8f-4320-8901-06ff9955241c\" (UID: \"d517139c-ff8f-4320-8901-06ff9955241c\") " Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.586507 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d517139c-ff8f-4320-8901-06ff9955241c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d517139c-ff8f-4320-8901-06ff9955241c" (UID: "d517139c-ff8f-4320-8901-06ff9955241c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.590038 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d517139c-ff8f-4320-8901-06ff9955241c-kube-api-access-mqwhm" (OuterVolumeSpecName: "kube-api-access-mqwhm") pod "d517139c-ff8f-4320-8901-06ff9955241c" (UID: "d517139c-ff8f-4320-8901-06ff9955241c"). InnerVolumeSpecName "kube-api-access-mqwhm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.688014 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqwhm\" (UniqueName: \"kubernetes.io/projected/d517139c-ff8f-4320-8901-06ff9955241c-kube-api-access-mqwhm\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:58 crc kubenswrapper[4799]: I0121 17:50:58.688317 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d517139c-ff8f-4320-8901-06ff9955241c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.077054 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"5fdd831026afa966e0f760fecb7476b95aadfcd525b00468c8c89ce1d2df0632"} Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.079873 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-6xr5d" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.079933 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-6xr5d" event={"ID":"d517139c-ff8f-4320-8901-06ff9955241c","Type":"ContainerDied","Data":"9fc6032b32c996b7fe381109240747a82a4eb406da80c177cee2d4600e4d3bf5"} Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.080078 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9fc6032b32c996b7fe381109240747a82a4eb406da80c177cee2d4600e4d3bf5" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.471001 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-83af-account-create-update-tz6l9" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.602015 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5362-account-create-update-9xw4s" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.604986 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-4a30-account-create-update-wqr8v" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.606882 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/082ee676-0c0e-48fc-a537-aac7e95dd4ae-operator-scripts\") pod \"082ee676-0c0e-48fc-a537-aac7e95dd4ae\" (UID: \"082ee676-0c0e-48fc-a537-aac7e95dd4ae\") " Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.606940 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h585g\" (UniqueName: \"kubernetes.io/projected/082ee676-0c0e-48fc-a537-aac7e95dd4ae-kube-api-access-h585g\") pod \"082ee676-0c0e-48fc-a537-aac7e95dd4ae\" (UID: \"082ee676-0c0e-48fc-a537-aac7e95dd4ae\") " Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.607448 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/082ee676-0c0e-48fc-a537-aac7e95dd4ae-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "082ee676-0c0e-48fc-a537-aac7e95dd4ae" (UID: "082ee676-0c0e-48fc-a537-aac7e95dd4ae"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.608599 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/082ee676-0c0e-48fc-a537-aac7e95dd4ae-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.611988 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/082ee676-0c0e-48fc-a537-aac7e95dd4ae-kube-api-access-h585g" (OuterVolumeSpecName: "kube-api-access-h585g") pod "082ee676-0c0e-48fc-a537-aac7e95dd4ae" (UID: "082ee676-0c0e-48fc-a537-aac7e95dd4ae"). InnerVolumeSpecName "kube-api-access-h585g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.710162 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39b6f6dd-e24e-4398-87d7-0fc790374a12-operator-scripts\") pod \"39b6f6dd-e24e-4398-87d7-0fc790374a12\" (UID: \"39b6f6dd-e24e-4398-87d7-0fc790374a12\") " Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.710221 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbdc6\" (UniqueName: \"kubernetes.io/projected/747c974f-6219-4bb3-a6d0-e657bd201d5d-kube-api-access-sbdc6\") pod \"747c974f-6219-4bb3-a6d0-e657bd201d5d\" (UID: \"747c974f-6219-4bb3-a6d0-e657bd201d5d\") " Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.710308 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/747c974f-6219-4bb3-a6d0-e657bd201d5d-operator-scripts\") pod \"747c974f-6219-4bb3-a6d0-e657bd201d5d\" (UID: \"747c974f-6219-4bb3-a6d0-e657bd201d5d\") " Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.710424 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9fr59\" (UniqueName: \"kubernetes.io/projected/39b6f6dd-e24e-4398-87d7-0fc790374a12-kube-api-access-9fr59\") pod \"39b6f6dd-e24e-4398-87d7-0fc790374a12\" (UID: \"39b6f6dd-e24e-4398-87d7-0fc790374a12\") " Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.710900 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h585g\" (UniqueName: \"kubernetes.io/projected/082ee676-0c0e-48fc-a537-aac7e95dd4ae-kube-api-access-h585g\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.711092 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/747c974f-6219-4bb3-a6d0-e657bd201d5d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "747c974f-6219-4bb3-a6d0-e657bd201d5d" (UID: "747c974f-6219-4bb3-a6d0-e657bd201d5d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.711450 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39b6f6dd-e24e-4398-87d7-0fc790374a12-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "39b6f6dd-e24e-4398-87d7-0fc790374a12" (UID: "39b6f6dd-e24e-4398-87d7-0fc790374a12"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.714254 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/747c974f-6219-4bb3-a6d0-e657bd201d5d-kube-api-access-sbdc6" (OuterVolumeSpecName: "kube-api-access-sbdc6") pod "747c974f-6219-4bb3-a6d0-e657bd201d5d" (UID: "747c974f-6219-4bb3-a6d0-e657bd201d5d"). InnerVolumeSpecName "kube-api-access-sbdc6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.714311 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39b6f6dd-e24e-4398-87d7-0fc790374a12-kube-api-access-9fr59" (OuterVolumeSpecName: "kube-api-access-9fr59") pod "39b6f6dd-e24e-4398-87d7-0fc790374a12" (UID: "39b6f6dd-e24e-4398-87d7-0fc790374a12"). InnerVolumeSpecName "kube-api-access-9fr59". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.812898 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9fr59\" (UniqueName: \"kubernetes.io/projected/39b6f6dd-e24e-4398-87d7-0fc790374a12-kube-api-access-9fr59\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.813186 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39b6f6dd-e24e-4398-87d7-0fc790374a12-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.813261 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbdc6\" (UniqueName: \"kubernetes.io/projected/747c974f-6219-4bb3-a6d0-e657bd201d5d-kube-api-access-sbdc6\") on node \"crc\" DevicePath \"\"" Jan 21 17:50:59 crc kubenswrapper[4799]: I0121 17:50:59.813320 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/747c974f-6219-4bb3-a6d0-e657bd201d5d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:00 crc kubenswrapper[4799]: I0121 17:51:00.030074 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-68wt5" podUID="05213e52-1f99-42a4-b882-4514760063c7" containerName="ovn-controller" probeResult="failure" output=< Jan 21 17:51:00 crc kubenswrapper[4799]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Jan 21 17:51:00 crc kubenswrapper[4799]: > Jan 21 17:51:00 crc kubenswrapper[4799]: I0121 17:51:00.089832 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-25rbc" event={"ID":"dc6f84a8-5008-4534-b894-1caa2f8585da","Type":"ContainerStarted","Data":"22135e09dbaa9baaec640bf5761ff28100f927c16c7d1a6d60b00558f74d18fc"} Jan 21 17:51:00 crc kubenswrapper[4799]: I0121 17:51:00.092165 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c3dbd916-66ac-4f70-a011-68d4195c5c44","Type":"ContainerStarted","Data":"50ee3bf8e92ed22d4083ba8c0544f4699656bb71f27a8ae12e04c78f3832b804"} Jan 21 17:51:00 crc kubenswrapper[4799]: I0121 17:51:00.096066 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-4a30-account-create-update-wqr8v" event={"ID":"747c974f-6219-4bb3-a6d0-e657bd201d5d","Type":"ContainerDied","Data":"86076e65d6ed55456f6eac79ac72b8dbd8bf806aff5adddc2fd712b8f7415f2f"} Jan 21 17:51:00 crc kubenswrapper[4799]: I0121 17:51:00.096092 4799 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86076e65d6ed55456f6eac79ac72b8dbd8bf806aff5adddc2fd712b8f7415f2f" Jan 21 17:51:00 crc kubenswrapper[4799]: I0121 17:51:00.096171 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-4a30-account-create-update-wqr8v" Jan 21 17:51:00 crc kubenswrapper[4799]: I0121 17:51:00.098365 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5362-account-create-update-9xw4s" Jan 21 17:51:00 crc kubenswrapper[4799]: I0121 17:51:00.098345 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5362-account-create-update-9xw4s" event={"ID":"39b6f6dd-e24e-4398-87d7-0fc790374a12","Type":"ContainerDied","Data":"c468ce2f0bed80250a3d6069c386f5081dc869cc064504b993f432eaf4ad46cd"} Jan 21 17:51:00 crc kubenswrapper[4799]: I0121 17:51:00.098679 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c468ce2f0bed80250a3d6069c386f5081dc869cc064504b993f432eaf4ad46cd" Jan 21 17:51:00 crc kubenswrapper[4799]: I0121 17:51:00.101008 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"63677f61-4283-417a-bcf7-303840452589","Type":"ContainerStarted","Data":"3709e0416de802cf6dfa796e639c5b91fe585032609d9d1ed1b7f36f87862f8d"} Jan 21 17:51:00 crc kubenswrapper[4799]: I0121 17:51:00.102664 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-83af-account-create-update-tz6l9" event={"ID":"082ee676-0c0e-48fc-a537-aac7e95dd4ae","Type":"ContainerDied","Data":"19a2f5825e725703dd7c87b967dd253c4c2cde5c06fe0f47ae4ab963988042db"} Jan 21 17:51:00 crc kubenswrapper[4799]: I0121 17:51:00.102709 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19a2f5825e725703dd7c87b967dd253c4c2cde5c06fe0f47ae4ab963988042db" Jan 21 17:51:00 crc kubenswrapper[4799]: I0121 17:51:00.102681 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-83af-account-create-update-tz6l9" Jan 21 17:51:00 crc kubenswrapper[4799]: I0121 17:51:00.596002 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-78bf94944f-splp6" podUID="6266e3d5-e453-43de-9353-84c2d23c23ea" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.118:5353: i/o timeout" Jan 21 17:51:01 crc kubenswrapper[4799]: I0121 17:51:01.114535 4799 generic.go:334] "Generic (PLEG): container finished" podID="dc6f84a8-5008-4534-b894-1caa2f8585da" containerID="22135e09dbaa9baaec640bf5761ff28100f927c16c7d1a6d60b00558f74d18fc" exitCode=0 Jan 21 17:51:01 crc kubenswrapper[4799]: I0121 17:51:01.114611 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-25rbc" event={"ID":"dc6f84a8-5008-4534-b894-1caa2f8585da","Type":"ContainerDied","Data":"22135e09dbaa9baaec640bf5761ff28100f927c16c7d1a6d60b00558f74d18fc"} Jan 21 17:51:01 crc kubenswrapper[4799]: I0121 17:51:01.121931 4799 generic.go:334] "Generic (PLEG): container finished" podID="eea46be9-f407-41a1-a0c0-e80caa761e8d" containerID="eb2904c5b8474e7c920a9a6f608841e76b0952943dd1ba4a3905acac4aa41449" exitCode=0 Jan 21 17:51:01 crc kubenswrapper[4799]: I0121 17:51:01.121991 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-mxsmt" event={"ID":"eea46be9-f407-41a1-a0c0-e80caa761e8d","Type":"ContainerDied","Data":"eb2904c5b8474e7c920a9a6f608841e76b0952943dd1ba4a3905acac4aa41449"} Jan 21 17:51:01 crc kubenswrapper[4799]: I0121 17:51:01.125311 4799 generic.go:334] "Generic (PLEG): container finished" podID="693423df-cd0b-4d1e-a58d-ec5f062db23d" containerID="8e4869fe1b4ed83a664528fbe7bb4a92df601aa469f4d511220502d45c923c78" exitCode=0 Jan 21 17:51:01 crc kubenswrapper[4799]: I0121 17:51:01.125494 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-rg5tc" event={"ID":"693423df-cd0b-4d1e-a58d-ec5f062db23d","Type":"ContainerDied","Data":"8e4869fe1b4ed83a664528fbe7bb4a92df601aa469f4d511220502d45c923c78"} Jan 21 17:51:01 crc kubenswrapper[4799]: I0121 17:51:01.125560 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:51:01 crc kubenswrapper[4799]: I0121 17:51:01.185899 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-notifications-server-0" podStartSLOduration=71.185870136 podStartE2EDuration="1m11.185870136s" podCreationTimestamp="2026-01-21 17:49:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:51:01.180937027 +0000 UTC m=+1087.807227070" watchObservedRunningTime="2026-01-21 17:51:01.185870136 +0000 UTC m=+1087.812160159" Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.668168 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-rg5tc" Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.677574 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-mxsmt" Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.743374 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-create-25rbc" Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.788353 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eea46be9-f407-41a1-a0c0-e80caa761e8d-operator-scripts\") pod \"eea46be9-f407-41a1-a0c0-e80caa761e8d\" (UID: \"eea46be9-f407-41a1-a0c0-e80caa761e8d\") " Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.788540 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/693423df-cd0b-4d1e-a58d-ec5f062db23d-operator-scripts\") pod \"693423df-cd0b-4d1e-a58d-ec5f062db23d\" (UID: \"693423df-cd0b-4d1e-a58d-ec5f062db23d\") " Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.788616 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vhlv\" (UniqueName: \"kubernetes.io/projected/693423df-cd0b-4d1e-a58d-ec5f062db23d-kube-api-access-6vhlv\") pod \"693423df-cd0b-4d1e-a58d-ec5f062db23d\" (UID: \"693423df-cd0b-4d1e-a58d-ec5f062db23d\") " Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.788687 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k52xc\" (UniqueName: \"kubernetes.io/projected/eea46be9-f407-41a1-a0c0-e80caa761e8d-kube-api-access-k52xc\") pod \"eea46be9-f407-41a1-a0c0-e80caa761e8d\" (UID: \"eea46be9-f407-41a1-a0c0-e80caa761e8d\") " Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.789371 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eea46be9-f407-41a1-a0c0-e80caa761e8d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eea46be9-f407-41a1-a0c0-e80caa761e8d" (UID: "eea46be9-f407-41a1-a0c0-e80caa761e8d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.789423 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/693423df-cd0b-4d1e-a58d-ec5f062db23d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "693423df-cd0b-4d1e-a58d-ec5f062db23d" (UID: "693423df-cd0b-4d1e-a58d-ec5f062db23d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.798858 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/693423df-cd0b-4d1e-a58d-ec5f062db23d-kube-api-access-6vhlv" (OuterVolumeSpecName: "kube-api-access-6vhlv") pod "693423df-cd0b-4d1e-a58d-ec5f062db23d" (UID: "693423df-cd0b-4d1e-a58d-ec5f062db23d"). InnerVolumeSpecName "kube-api-access-6vhlv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.801890 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eea46be9-f407-41a1-a0c0-e80caa761e8d-kube-api-access-k52xc" (OuterVolumeSpecName: "kube-api-access-k52xc") pod "eea46be9-f407-41a1-a0c0-e80caa761e8d" (UID: "eea46be9-f407-41a1-a0c0-e80caa761e8d"). InnerVolumeSpecName "kube-api-access-k52xc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:51:03 crc kubenswrapper[4799]: E0121 17:51:03.831813 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/prometheus-metric-storage-0" podUID="c3dbd916-66ac-4f70-a011-68d4195c5c44" Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.890849 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhks6\" (UniqueName: \"kubernetes.io/projected/dc6f84a8-5008-4534-b894-1caa2f8585da-kube-api-access-qhks6\") pod \"dc6f84a8-5008-4534-b894-1caa2f8585da\" (UID: \"dc6f84a8-5008-4534-b894-1caa2f8585da\") " Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.891041 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc6f84a8-5008-4534-b894-1caa2f8585da-operator-scripts\") pod \"dc6f84a8-5008-4534-b894-1caa2f8585da\" (UID: \"dc6f84a8-5008-4534-b894-1caa2f8585da\") " Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.891610 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/693423df-cd0b-4d1e-a58d-ec5f062db23d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.891638 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vhlv\" (UniqueName: \"kubernetes.io/projected/693423df-cd0b-4d1e-a58d-ec5f062db23d-kube-api-access-6vhlv\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.891656 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k52xc\" (UniqueName: \"kubernetes.io/projected/eea46be9-f407-41a1-a0c0-e80caa761e8d-kube-api-access-k52xc\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.891671 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eea46be9-f407-41a1-a0c0-e80caa761e8d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.891671 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc6f84a8-5008-4534-b894-1caa2f8585da-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dc6f84a8-5008-4534-b894-1caa2f8585da" (UID: "dc6f84a8-5008-4534-b894-1caa2f8585da"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.895819 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc6f84a8-5008-4534-b894-1caa2f8585da-kube-api-access-qhks6" (OuterVolumeSpecName: "kube-api-access-qhks6") pod "dc6f84a8-5008-4534-b894-1caa2f8585da" (UID: "dc6f84a8-5008-4534-b894-1caa2f8585da"). InnerVolumeSpecName "kube-api-access-qhks6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.994111 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc6f84a8-5008-4534-b894-1caa2f8585da-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:03 crc kubenswrapper[4799]: I0121 17:51:03.994166 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhks6\" (UniqueName: \"kubernetes.io/projected/dc6f84a8-5008-4534-b894-1caa2f8585da-kube-api-access-qhks6\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:04 crc kubenswrapper[4799]: I0121 17:51:04.152737 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-25rbc" event={"ID":"dc6f84a8-5008-4534-b894-1caa2f8585da","Type":"ContainerDied","Data":"e63f8826075f582459815d61b9c56ef93e7d9d2feb0e1f6564540ab3e5b0d4d5"} Jan 21 17:51:04 crc kubenswrapper[4799]: I0121 17:51:04.153029 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e63f8826075f582459815d61b9c56ef93e7d9d2feb0e1f6564540ab3e5b0d4d5" Jan 21 17:51:04 crc kubenswrapper[4799]: I0121 17:51:04.152819 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-25rbc" Jan 21 17:51:04 crc kubenswrapper[4799]: I0121 17:51:04.155688 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c3dbd916-66ac-4f70-a011-68d4195c5c44","Type":"ContainerStarted","Data":"bd4c96de985b45b615e4cbe201c49e4a6ca0aff9d074405dd85f9edc5adc331f"} Jan 21 17:51:04 crc kubenswrapper[4799]: I0121 17:51:04.157451 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-mxsmt" event={"ID":"eea46be9-f407-41a1-a0c0-e80caa761e8d","Type":"ContainerDied","Data":"60d07fc4f283d0413eb453b0c4fc132eb0899fcb028cc169f28004166f3009a8"} Jan 21 17:51:04 crc kubenswrapper[4799]: I0121 17:51:04.157477 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60d07fc4f283d0413eb453b0c4fc132eb0899fcb028cc169f28004166f3009a8" Jan 21 17:51:04 crc kubenswrapper[4799]: I0121 17:51:04.157496 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-mxsmt" Jan 21 17:51:04 crc kubenswrapper[4799]: I0121 17:51:04.165416 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-rg5tc" event={"ID":"693423df-cd0b-4d1e-a58d-ec5f062db23d","Type":"ContainerDied","Data":"cc1b631f345ac346d69598bf7b4a4be2d76bf72b77f9824529334f52f0f80fcf"} Jan 21 17:51:04 crc kubenswrapper[4799]: I0121 17:51:04.165462 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc1b631f345ac346d69598bf7b4a4be2d76bf72b77f9824529334f52f0f80fcf" Jan 21 17:51:04 crc kubenswrapper[4799]: I0121 17:51:04.165533 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-rg5tc" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.025946 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-68wt5" podUID="05213e52-1f99-42a4-b882-4514760063c7" containerName="ovn-controller" probeResult="failure" output=< Jan 21 17:51:05 crc kubenswrapper[4799]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Jan 21 17:51:05 crc kubenswrapper[4799]: > Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.087978 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.088481 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-5dwpd" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.346625 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-68wt5-config-mqf6h"] Jan 21 17:51:05 crc kubenswrapper[4799]: E0121 17:51:05.347109 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6266e3d5-e453-43de-9353-84c2d23c23ea" containerName="dnsmasq-dns" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347149 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6266e3d5-e453-43de-9353-84c2d23c23ea" containerName="dnsmasq-dns" Jan 21 17:51:05 crc kubenswrapper[4799]: E0121 17:51:05.347171 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc6f84a8-5008-4534-b894-1caa2f8585da" containerName="mariadb-database-create" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347179 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc6f84a8-5008-4534-b894-1caa2f8585da" containerName="mariadb-database-create" Jan 21 17:51:05 crc kubenswrapper[4799]: E0121 17:51:05.347191 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="082ee676-0c0e-48fc-a537-aac7e95dd4ae" containerName="mariadb-account-create-update" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347201 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="082ee676-0c0e-48fc-a537-aac7e95dd4ae" containerName="mariadb-account-create-update" Jan 21 17:51:05 crc kubenswrapper[4799]: E0121 17:51:05.347218 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="747c974f-6219-4bb3-a6d0-e657bd201d5d" containerName="mariadb-account-create-update" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347225 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="747c974f-6219-4bb3-a6d0-e657bd201d5d" containerName="mariadb-account-create-update" Jan 21 17:51:05 crc kubenswrapper[4799]: E0121 17:51:05.347236 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="693423df-cd0b-4d1e-a58d-ec5f062db23d" containerName="mariadb-database-create" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347243 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="693423df-cd0b-4d1e-a58d-ec5f062db23d" containerName="mariadb-database-create" Jan 21 17:51:05 crc kubenswrapper[4799]: E0121 17:51:05.347253 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eea46be9-f407-41a1-a0c0-e80caa761e8d" containerName="mariadb-account-create-update" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347261 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="eea46be9-f407-41a1-a0c0-e80caa761e8d" containerName="mariadb-account-create-update" Jan 21 17:51:05 crc 
kubenswrapper[4799]: E0121 17:51:05.347282 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39b6f6dd-e24e-4398-87d7-0fc790374a12" containerName="mariadb-account-create-update" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347290 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="39b6f6dd-e24e-4398-87d7-0fc790374a12" containerName="mariadb-account-create-update" Jan 21 17:51:05 crc kubenswrapper[4799]: E0121 17:51:05.347307 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6266e3d5-e453-43de-9353-84c2d23c23ea" containerName="init" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347317 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6266e3d5-e453-43de-9353-84c2d23c23ea" containerName="init" Jan 21 17:51:05 crc kubenswrapper[4799]: E0121 17:51:05.347335 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d517139c-ff8f-4320-8901-06ff9955241c" containerName="mariadb-database-create" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347342 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d517139c-ff8f-4320-8901-06ff9955241c" containerName="mariadb-database-create" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347555 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="747c974f-6219-4bb3-a6d0-e657bd201d5d" containerName="mariadb-account-create-update" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347573 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc6f84a8-5008-4534-b894-1caa2f8585da" containerName="mariadb-database-create" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347590 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="082ee676-0c0e-48fc-a537-aac7e95dd4ae" containerName="mariadb-account-create-update" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347605 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6266e3d5-e453-43de-9353-84c2d23c23ea" containerName="dnsmasq-dns" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347623 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="693423df-cd0b-4d1e-a58d-ec5f062db23d" containerName="mariadb-database-create" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347638 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="39b6f6dd-e24e-4398-87d7-0fc790374a12" containerName="mariadb-account-create-update" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347652 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="eea46be9-f407-41a1-a0c0-e80caa761e8d" containerName="mariadb-account-create-update" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.347661 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d517139c-ff8f-4320-8901-06ff9955241c" containerName="mariadb-database-create" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.348546 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.351078 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.365250 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-68wt5-config-mqf6h"] Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.418287 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-log-ovn\") pod \"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.418499 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46rhn\" (UniqueName: \"kubernetes.io/projected/cdfa7e30-e283-44a1-b52c-22e0a87292ca-kube-api-access-46rhn\") pod \"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.418782 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-run\") pod \"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.418935 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cdfa7e30-e283-44a1-b52c-22e0a87292ca-scripts\") pod \"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.419004 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/cdfa7e30-e283-44a1-b52c-22e0a87292ca-additional-scripts\") pod \"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.419100 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-run-ovn\") pod \"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.521346 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-run\") pod \"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.521423 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cdfa7e30-e283-44a1-b52c-22e0a87292ca-scripts\") pod 
\"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.521461 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/cdfa7e30-e283-44a1-b52c-22e0a87292ca-additional-scripts\") pod \"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.521502 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-run-ovn\") pod \"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.521563 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-log-ovn\") pod \"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.521657 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-run\") pod \"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.521671 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46rhn\" (UniqueName: \"kubernetes.io/projected/cdfa7e30-e283-44a1-b52c-22e0a87292ca-kube-api-access-46rhn\") pod \"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.521939 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-run-ovn\") pod \"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.522378 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/cdfa7e30-e283-44a1-b52c-22e0a87292ca-additional-scripts\") pod \"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.522485 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-log-ovn\") pod \"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.524256 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cdfa7e30-e283-44a1-b52c-22e0a87292ca-scripts\") pod 
\"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.544933 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46rhn\" (UniqueName: \"kubernetes.io/projected/cdfa7e30-e283-44a1-b52c-22e0a87292ca-kube-api-access-46rhn\") pod \"ovn-controller-68wt5-config-mqf6h\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:05 crc kubenswrapper[4799]: I0121 17:51:05.667350 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:06 crc kubenswrapper[4799]: I0121 17:51:06.177905 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-68wt5-config-mqf6h"] Jan 21 17:51:06 crc kubenswrapper[4799]: I0121 17:51:06.192766 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c3dbd916-66ac-4f70-a011-68d4195c5c44","Type":"ContainerStarted","Data":"9fcf6ce33314351c1e09c5a7bd810145554bb26679d9a852e6f90bf8138fbe4a"} Jan 21 17:51:06 crc kubenswrapper[4799]: I0121 17:51:06.229262 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=29.20139604 podStartE2EDuration="1m10.229238694s" podCreationTimestamp="2026-01-21 17:49:56 +0000 UTC" firstStartedPulling="2026-01-21 17:50:23.963680134 +0000 UTC m=+1050.589970157" lastFinishedPulling="2026-01-21 17:51:04.991522788 +0000 UTC m=+1091.617812811" observedRunningTime="2026-01-21 17:51:06.225893741 +0000 UTC m=+1092.852183764" watchObservedRunningTime="2026-01-21 17:51:06.229238694 +0000 UTC m=+1092.855528727" Jan 21 17:51:07 crc kubenswrapper[4799]: I0121 17:51:07.201913 4799 generic.go:334] "Generic (PLEG): container finished" podID="48f0f966-0779-4959-884e-eae4ed66e969" containerID="c44dea80f4b6b10d56559fe49cb3b1af988bd74e190232574355f35b1495761d" exitCode=0 Jan 21 17:51:07 crc kubenswrapper[4799]: I0121 17:51:07.201995 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"48f0f966-0779-4959-884e-eae4ed66e969","Type":"ContainerDied","Data":"c44dea80f4b6b10d56559fe49cb3b1af988bd74e190232574355f35b1495761d"} Jan 21 17:51:07 crc kubenswrapper[4799]: I0121 17:51:07.205725 4799 generic.go:334] "Generic (PLEG): container finished" podID="cdfa7e30-e283-44a1-b52c-22e0a87292ca" containerID="2904f03ad517fa9f0976131f578ba33ac0977a131bf57e2229450a99359c1801" exitCode=0 Jan 21 17:51:07 crc kubenswrapper[4799]: I0121 17:51:07.205774 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-68wt5-config-mqf6h" event={"ID":"cdfa7e30-e283-44a1-b52c-22e0a87292ca","Type":"ContainerDied","Data":"2904f03ad517fa9f0976131f578ba33ac0977a131bf57e2229450a99359c1801"} Jan 21 17:51:07 crc kubenswrapper[4799]: I0121 17:51:07.205821 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-68wt5-config-mqf6h" event={"ID":"cdfa7e30-e283-44a1-b52c-22e0a87292ca","Type":"ContainerStarted","Data":"661e1fe324116493f2ac72d0a6a1341a0ddfa1fb1437d6b0f630f59597d2e0b0"} Jan 21 17:51:07 crc kubenswrapper[4799]: I0121 17:51:07.987896 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-mxsmt"] Jan 21 17:51:07 crc kubenswrapper[4799]: I0121 17:51:07.997386 4799 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-mxsmt"] Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.223105 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eea46be9-f407-41a1-a0c0-e80caa761e8d" path="/var/lib/kubelet/pods/eea46be9-f407-41a1-a0c0-e80caa761e8d/volumes" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.226264 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"48f0f966-0779-4959-884e-eae4ed66e969","Type":"ContainerStarted","Data":"0a1bba0cc2d36467280bd23ec59ec0a87b3c3d464346de4cff87e4c3a2018228"} Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.227443 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.229236 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-72p7x" event={"ID":"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02","Type":"ContainerStarted","Data":"09b421e7dfb8552f49a134554271f27d62edeef25412e71e3bfb31ade6741966"} Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.255508 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371957.59929 podStartE2EDuration="1m19.255484805s" podCreationTimestamp="2026-01-21 17:49:49 +0000 UTC" firstStartedPulling="2026-01-21 17:49:51.37070092 +0000 UTC m=+1017.996990943" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:51:08.248336504 +0000 UTC m=+1094.874626527" watchObservedRunningTime="2026-01-21 17:51:08.255484805 +0000 UTC m=+1094.881774818" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.276932 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-72p7x" podStartSLOduration=1.899398792 podStartE2EDuration="26.276908265s" podCreationTimestamp="2026-01-21 17:50:42 +0000 UTC" firstStartedPulling="2026-01-21 17:50:42.931241572 +0000 UTC m=+1069.557531595" lastFinishedPulling="2026-01-21 17:51:07.308751045 +0000 UTC m=+1093.935041068" observedRunningTime="2026-01-21 17:51:08.270533126 +0000 UTC m=+1094.896823149" watchObservedRunningTime="2026-01-21 17:51:08.276908265 +0000 UTC m=+1094.903198308" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.542400 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.554308 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.678891 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-run\") pod \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.679242 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-log-ovn\") pod \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.679291 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cdfa7e30-e283-44a1-b52c-22e0a87292ca-scripts\") pod \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.679309 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-run-ovn\") pod \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.679331 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46rhn\" (UniqueName: \"kubernetes.io/projected/cdfa7e30-e283-44a1-b52c-22e0a87292ca-kube-api-access-46rhn\") pod \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.679365 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/cdfa7e30-e283-44a1-b52c-22e0a87292ca-additional-scripts\") pod \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\" (UID: \"cdfa7e30-e283-44a1-b52c-22e0a87292ca\") " Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.679005 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-run" (OuterVolumeSpecName: "var-run") pod "cdfa7e30-e283-44a1-b52c-22e0a87292ca" (UID: "cdfa7e30-e283-44a1-b52c-22e0a87292ca"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.679404 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "cdfa7e30-e283-44a1-b52c-22e0a87292ca" (UID: "cdfa7e30-e283-44a1-b52c-22e0a87292ca"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.679486 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "cdfa7e30-e283-44a1-b52c-22e0a87292ca" (UID: "cdfa7e30-e283-44a1-b52c-22e0a87292ca"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.679809 4799 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-run\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.679824 4799 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.679835 4799 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cdfa7e30-e283-44a1-b52c-22e0a87292ca-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.680557 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdfa7e30-e283-44a1-b52c-22e0a87292ca-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "cdfa7e30-e283-44a1-b52c-22e0a87292ca" (UID: "cdfa7e30-e283-44a1-b52c-22e0a87292ca"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.680621 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdfa7e30-e283-44a1-b52c-22e0a87292ca-scripts" (OuterVolumeSpecName: "scripts") pod "cdfa7e30-e283-44a1-b52c-22e0a87292ca" (UID: "cdfa7e30-e283-44a1-b52c-22e0a87292ca"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.685880 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdfa7e30-e283-44a1-b52c-22e0a87292ca-kube-api-access-46rhn" (OuterVolumeSpecName: "kube-api-access-46rhn") pod "cdfa7e30-e283-44a1-b52c-22e0a87292ca" (UID: "cdfa7e30-e283-44a1-b52c-22e0a87292ca"). InnerVolumeSpecName "kube-api-access-46rhn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.781702 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cdfa7e30-e283-44a1-b52c-22e0a87292ca-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.781734 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46rhn\" (UniqueName: \"kubernetes.io/projected/cdfa7e30-e283-44a1-b52c-22e0a87292ca-kube-api-access-46rhn\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:08 crc kubenswrapper[4799]: I0121 17:51:08.781746 4799 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/cdfa7e30-e283-44a1-b52c-22e0a87292ca-additional-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:09 crc kubenswrapper[4799]: I0121 17:51:09.241572 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-68wt5-config-mqf6h" Jan 21 17:51:09 crc kubenswrapper[4799]: I0121 17:51:09.242828 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-68wt5-config-mqf6h" event={"ID":"cdfa7e30-e283-44a1-b52c-22e0a87292ca","Type":"ContainerDied","Data":"661e1fe324116493f2ac72d0a6a1341a0ddfa1fb1437d6b0f630f59597d2e0b0"} Jan 21 17:51:09 crc kubenswrapper[4799]: I0121 17:51:09.242965 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="661e1fe324116493f2ac72d0a6a1341a0ddfa1fb1437d6b0f630f59597d2e0b0" Jan 21 17:51:09 crc kubenswrapper[4799]: I0121 17:51:09.685341 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-68wt5-config-mqf6h"] Jan 21 17:51:09 crc kubenswrapper[4799]: I0121 17:51:09.693633 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-68wt5-config-mqf6h"] Jan 21 17:51:10 crc kubenswrapper[4799]: I0121 17:51:10.106738 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:51:10 crc kubenswrapper[4799]: E0121 17:51:10.106978 4799 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 21 17:51:10 crc kubenswrapper[4799]: E0121 17:51:10.107013 4799 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 21 17:51:10 crc kubenswrapper[4799]: E0121 17:51:10.107093 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift podName:771ea47a-76eb-434d-ac1f-cf6048f08237 nodeName:}" failed. No retries permitted until 2026-01-21 17:51:42.107058808 +0000 UTC m=+1128.733348841 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift") pod "swift-storage-0" (UID: "771ea47a-76eb-434d-ac1f-cf6048f08237") : configmap "swift-ring-files" not found Jan 21 17:51:10 crc kubenswrapper[4799]: I0121 17:51:10.174200 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-68wt5" Jan 21 17:51:10 crc kubenswrapper[4799]: I0121 17:51:10.215752 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdfa7e30-e283-44a1-b52c-22e0a87292ca" path="/var/lib/kubelet/pods/cdfa7e30-e283-44a1-b52c-22e0a87292ca/volumes" Jan 21 17:51:11 crc kubenswrapper[4799]: I0121 17:51:11.219538 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="03a5694f-1e8b-490e-be8f-dce31bdd83c3" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.107:5671: connect: connection refused" Jan 21 17:51:11 crc kubenswrapper[4799]: I0121 17:51:11.870878 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-notifications-server-0" podUID="63677f61-4283-417a-bcf7-303840452589" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.108:5671: connect: connection refused" Jan 21 17:51:13 crc kubenswrapper[4799]: I0121 17:51:13.017057 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-9m9t5"] Jan 21 17:51:13 crc kubenswrapper[4799]: E0121 17:51:13.017849 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdfa7e30-e283-44a1-b52c-22e0a87292ca" containerName="ovn-config" Jan 21 17:51:13 crc kubenswrapper[4799]: I0121 17:51:13.017868 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdfa7e30-e283-44a1-b52c-22e0a87292ca" containerName="ovn-config" Jan 21 17:51:13 crc kubenswrapper[4799]: I0121 17:51:13.018164 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdfa7e30-e283-44a1-b52c-22e0a87292ca" containerName="ovn-config" Jan 21 17:51:13 crc kubenswrapper[4799]: I0121 17:51:13.019100 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-9m9t5" Jan 21 17:51:13 crc kubenswrapper[4799]: I0121 17:51:13.022604 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Jan 21 17:51:13 crc kubenswrapper[4799]: I0121 17:51:13.030273 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-9m9t5"] Jan 21 17:51:13 crc kubenswrapper[4799]: I0121 17:51:13.173985 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4hxf\" (UniqueName: \"kubernetes.io/projected/c28dcade-7a63-4e0c-988b-1aac42353632-kube-api-access-d4hxf\") pod \"root-account-create-update-9m9t5\" (UID: \"c28dcade-7a63-4e0c-988b-1aac42353632\") " pod="openstack/root-account-create-update-9m9t5" Jan 21 17:51:13 crc kubenswrapper[4799]: I0121 17:51:13.174416 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c28dcade-7a63-4e0c-988b-1aac42353632-operator-scripts\") pod \"root-account-create-update-9m9t5\" (UID: \"c28dcade-7a63-4e0c-988b-1aac42353632\") " pod="openstack/root-account-create-update-9m9t5" Jan 21 17:51:13 crc kubenswrapper[4799]: I0121 17:51:13.276628 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4hxf\" (UniqueName: \"kubernetes.io/projected/c28dcade-7a63-4e0c-988b-1aac42353632-kube-api-access-d4hxf\") pod \"root-account-create-update-9m9t5\" (UID: \"c28dcade-7a63-4e0c-988b-1aac42353632\") " pod="openstack/root-account-create-update-9m9t5" Jan 21 17:51:13 crc kubenswrapper[4799]: I0121 17:51:13.276693 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c28dcade-7a63-4e0c-988b-1aac42353632-operator-scripts\") pod \"root-account-create-update-9m9t5\" (UID: \"c28dcade-7a63-4e0c-988b-1aac42353632\") " pod="openstack/root-account-create-update-9m9t5" Jan 21 17:51:13 crc kubenswrapper[4799]: I0121 17:51:13.277987 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c28dcade-7a63-4e0c-988b-1aac42353632-operator-scripts\") pod \"root-account-create-update-9m9t5\" (UID: \"c28dcade-7a63-4e0c-988b-1aac42353632\") " pod="openstack/root-account-create-update-9m9t5" Jan 21 17:51:13 crc kubenswrapper[4799]: I0121 17:51:13.302015 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4hxf\" (UniqueName: \"kubernetes.io/projected/c28dcade-7a63-4e0c-988b-1aac42353632-kube-api-access-d4hxf\") pod \"root-account-create-update-9m9t5\" (UID: \"c28dcade-7a63-4e0c-988b-1aac42353632\") " pod="openstack/root-account-create-update-9m9t5" Jan 21 17:51:13 crc kubenswrapper[4799]: I0121 17:51:13.351496 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-9m9t5" Jan 21 17:51:13 crc kubenswrapper[4799]: I0121 17:51:13.542685 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:13 crc kubenswrapper[4799]: I0121 17:51:13.548255 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:13 crc kubenswrapper[4799]: I0121 17:51:13.863778 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-9m9t5"] Jan 21 17:51:14 crc kubenswrapper[4799]: I0121 17:51:14.281193 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-9m9t5" event={"ID":"c28dcade-7a63-4e0c-988b-1aac42353632","Type":"ContainerStarted","Data":"e268b7b6e4009e5ade7686cf1fb40157fab2888cd1c7a6aca298b45d83edfbf5"} Jan 21 17:51:14 crc kubenswrapper[4799]: I0121 17:51:14.282433 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-9m9t5" event={"ID":"c28dcade-7a63-4e0c-988b-1aac42353632","Type":"ContainerStarted","Data":"b5671e1c93e305186c2d2fb760872ab959ef2deb2af71adb39034b5bd4e64a81"} Jan 21 17:51:14 crc kubenswrapper[4799]: I0121 17:51:14.283405 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:14 crc kubenswrapper[4799]: I0121 17:51:14.304229 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/root-account-create-update-9m9t5" podStartSLOduration=2.304204345 podStartE2EDuration="2.304204345s" podCreationTimestamp="2026-01-21 17:51:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:51:14.2929692 +0000 UTC m=+1100.919259233" watchObservedRunningTime="2026-01-21 17:51:14.304204345 +0000 UTC m=+1100.930494368" Jan 21 17:51:15 crc kubenswrapper[4799]: I0121 17:51:15.291072 4799 generic.go:334] "Generic (PLEG): container finished" podID="c28dcade-7a63-4e0c-988b-1aac42353632" containerID="e268b7b6e4009e5ade7686cf1fb40157fab2888cd1c7a6aca298b45d83edfbf5" exitCode=0 Jan 21 17:51:15 crc kubenswrapper[4799]: I0121 17:51:15.291166 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-9m9t5" event={"ID":"c28dcade-7a63-4e0c-988b-1aac42353632","Type":"ContainerDied","Data":"e268b7b6e4009e5ade7686cf1fb40157fab2888cd1c7a6aca298b45d83edfbf5"} Jan 21 17:51:16 crc kubenswrapper[4799]: I0121 17:51:16.302861 4799 generic.go:334] "Generic (PLEG): container finished" podID="a9b76963-d66a-43b7-9f1a-ef2a18ef6d02" containerID="09b421e7dfb8552f49a134554271f27d62edeef25412e71e3bfb31ade6741966" exitCode=0 Jan 21 17:51:16 crc kubenswrapper[4799]: I0121 17:51:16.302951 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-72p7x" event={"ID":"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02","Type":"ContainerDied","Data":"09b421e7dfb8552f49a134554271f27d62edeef25412e71e3bfb31ade6741966"} Jan 21 17:51:16 crc kubenswrapper[4799]: I0121 17:51:16.560969 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 21 17:51:16 crc kubenswrapper[4799]: I0121 17:51:16.561594 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="c3dbd916-66ac-4f70-a011-68d4195c5c44" 
containerName="config-reloader" containerID="cri-o://50ee3bf8e92ed22d4083ba8c0544f4699656bb71f27a8ae12e04c78f3832b804" gracePeriod=600 Jan 21 17:51:16 crc kubenswrapper[4799]: I0121 17:51:16.562034 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerName="prometheus" containerID="cri-o://9fcf6ce33314351c1e09c5a7bd810145554bb26679d9a852e6f90bf8138fbe4a" gracePeriod=600 Jan 21 17:51:16 crc kubenswrapper[4799]: I0121 17:51:16.562092 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerName="thanos-sidecar" containerID="cri-o://bd4c96de985b45b615e4cbe201c49e4a6ca0aff9d074405dd85f9edc5adc331f" gracePeriod=600 Jan 21 17:51:16 crc kubenswrapper[4799]: I0121 17:51:16.611090 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-9m9t5" Jan 21 17:51:16 crc kubenswrapper[4799]: I0121 17:51:16.746098 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4hxf\" (UniqueName: \"kubernetes.io/projected/c28dcade-7a63-4e0c-988b-1aac42353632-kube-api-access-d4hxf\") pod \"c28dcade-7a63-4e0c-988b-1aac42353632\" (UID: \"c28dcade-7a63-4e0c-988b-1aac42353632\") " Jan 21 17:51:16 crc kubenswrapper[4799]: I0121 17:51:16.746392 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c28dcade-7a63-4e0c-988b-1aac42353632-operator-scripts\") pod \"c28dcade-7a63-4e0c-988b-1aac42353632\" (UID: \"c28dcade-7a63-4e0c-988b-1aac42353632\") " Jan 21 17:51:16 crc kubenswrapper[4799]: I0121 17:51:16.746703 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c28dcade-7a63-4e0c-988b-1aac42353632-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c28dcade-7a63-4e0c-988b-1aac42353632" (UID: "c28dcade-7a63-4e0c-988b-1aac42353632"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:51:16 crc kubenswrapper[4799]: I0121 17:51:16.747071 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c28dcade-7a63-4e0c-988b-1aac42353632-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:16 crc kubenswrapper[4799]: I0121 17:51:16.751344 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c28dcade-7a63-4e0c-988b-1aac42353632-kube-api-access-d4hxf" (OuterVolumeSpecName: "kube-api-access-d4hxf") pod "c28dcade-7a63-4e0c-988b-1aac42353632" (UID: "c28dcade-7a63-4e0c-988b-1aac42353632"). InnerVolumeSpecName "kube-api-access-d4hxf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:51:16 crc kubenswrapper[4799]: I0121 17:51:16.848442 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4hxf\" (UniqueName: \"kubernetes.io/projected/c28dcade-7a63-4e0c-988b-1aac42353632-kube-api-access-d4hxf\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.315560 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-9m9t5" event={"ID":"c28dcade-7a63-4e0c-988b-1aac42353632","Type":"ContainerDied","Data":"b5671e1c93e305186c2d2fb760872ab959ef2deb2af71adb39034b5bd4e64a81"} Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.315607 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5671e1c93e305186c2d2fb760872ab959ef2deb2af71adb39034b5bd4e64a81" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.315668 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-9m9t5" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.326200 4799 generic.go:334] "Generic (PLEG): container finished" podID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerID="9fcf6ce33314351c1e09c5a7bd810145554bb26679d9a852e6f90bf8138fbe4a" exitCode=0 Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.326244 4799 generic.go:334] "Generic (PLEG): container finished" podID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerID="bd4c96de985b45b615e4cbe201c49e4a6ca0aff9d074405dd85f9edc5adc331f" exitCode=0 Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.326255 4799 generic.go:334] "Generic (PLEG): container finished" podID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerID="50ee3bf8e92ed22d4083ba8c0544f4699656bb71f27a8ae12e04c78f3832b804" exitCode=0 Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.326287 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c3dbd916-66ac-4f70-a011-68d4195c5c44","Type":"ContainerDied","Data":"9fcf6ce33314351c1e09c5a7bd810145554bb26679d9a852e6f90bf8138fbe4a"} Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.326323 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c3dbd916-66ac-4f70-a011-68d4195c5c44","Type":"ContainerDied","Data":"bd4c96de985b45b615e4cbe201c49e4a6ca0aff9d074405dd85f9edc5adc331f"} Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.326357 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c3dbd916-66ac-4f70-a011-68d4195c5c44","Type":"ContainerDied","Data":"50ee3bf8e92ed22d4083ba8c0544f4699656bb71f27a8ae12e04c78f3832b804"} Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.478189 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.562737 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-2\") pod \"c3dbd916-66ac-4f70-a011-68d4195c5c44\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.563444 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-2" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-2") pod "c3dbd916-66ac-4f70-a011-68d4195c5c44" (UID: "c3dbd916-66ac-4f70-a011-68d4195c5c44"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-2". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.563693 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-0\") pod \"c3dbd916-66ac-4f70-a011-68d4195c5c44\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.564062 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "c3dbd916-66ac-4f70-a011-68d4195c5c44" (UID: "c3dbd916-66ac-4f70-a011-68d4195c5c44"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.564152 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-web-config\") pod \"c3dbd916-66ac-4f70-a011-68d4195c5c44\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.564180 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c3dbd916-66ac-4f70-a011-68d4195c5c44-tls-assets\") pod \"c3dbd916-66ac-4f70-a011-68d4195c5c44\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.564310 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") pod \"c3dbd916-66ac-4f70-a011-68d4195c5c44\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.564366 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-1\") pod \"c3dbd916-66ac-4f70-a011-68d4195c5c44\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.564661 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bxx4k\" (UniqueName: \"kubernetes.io/projected/c3dbd916-66ac-4f70-a011-68d4195c5c44-kube-api-access-bxx4k\") pod \"c3dbd916-66ac-4f70-a011-68d4195c5c44\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.564702 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-config\") pod \"c3dbd916-66ac-4f70-a011-68d4195c5c44\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.564725 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-thanos-prometheus-http-client-file\") pod \"c3dbd916-66ac-4f70-a011-68d4195c5c44\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.564745 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c3dbd916-66ac-4f70-a011-68d4195c5c44-config-out\") pod \"c3dbd916-66ac-4f70-a011-68d4195c5c44\" (UID: \"c3dbd916-66ac-4f70-a011-68d4195c5c44\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.564976 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-1" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-1") pod "c3dbd916-66ac-4f70-a011-68d4195c5c44" (UID: "c3dbd916-66ac-4f70-a011-68d4195c5c44"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-1". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.565295 4799 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-1\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.565309 4799 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-2\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.565319 4799 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c3dbd916-66ac-4f70-a011-68d4195c5c44-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.569405 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "c3dbd916-66ac-4f70-a011-68d4195c5c44" (UID: "c3dbd916-66ac-4f70-a011-68d4195c5c44"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.570205 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-config" (OuterVolumeSpecName: "config") pod "c3dbd916-66ac-4f70-a011-68d4195c5c44" (UID: "c3dbd916-66ac-4f70-a011-68d4195c5c44"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.570289 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3dbd916-66ac-4f70-a011-68d4195c5c44-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "c3dbd916-66ac-4f70-a011-68d4195c5c44" (UID: "c3dbd916-66ac-4f70-a011-68d4195c5c44"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.572439 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3dbd916-66ac-4f70-a011-68d4195c5c44-kube-api-access-bxx4k" (OuterVolumeSpecName: "kube-api-access-bxx4k") pod "c3dbd916-66ac-4f70-a011-68d4195c5c44" (UID: "c3dbd916-66ac-4f70-a011-68d4195c5c44"). InnerVolumeSpecName "kube-api-access-bxx4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.572545 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3dbd916-66ac-4f70-a011-68d4195c5c44-config-out" (OuterVolumeSpecName: "config-out") pod "c3dbd916-66ac-4f70-a011-68d4195c5c44" (UID: "c3dbd916-66ac-4f70-a011-68d4195c5c44"). InnerVolumeSpecName "config-out". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.601102 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-web-config" (OuterVolumeSpecName: "web-config") pod "c3dbd916-66ac-4f70-a011-68d4195c5c44" (UID: "c3dbd916-66ac-4f70-a011-68d4195c5c44"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.609893 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "c3dbd916-66ac-4f70-a011-68d4195c5c44" (UID: "c3dbd916-66ac-4f70-a011-68d4195c5c44"). InnerVolumeSpecName "pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.661558 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.667375 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") on node \"crc\" " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.667639 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bxx4k\" (UniqueName: \"kubernetes.io/projected/c3dbd916-66ac-4f70-a011-68d4195c5c44-kube-api-access-bxx4k\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.667716 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.667781 4799 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.667849 4799 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c3dbd916-66ac-4f70-a011-68d4195c5c44-config-out\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.667913 4799 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c3dbd916-66ac-4f70-a011-68d4195c5c44-web-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.667978 4799 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c3dbd916-66ac-4f70-a011-68d4195c5c44-tls-assets\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.708243 4799 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.708430 4799 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea") on node "crc" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.769192 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-ring-data-devices\") pod \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.769350 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-swiftconf\") pod \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.769386 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-scripts\") pod \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.769444 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-etc-swift\") pod \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.769486 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-combined-ca-bundle\") pod \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.769592 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6s2xr\" (UniqueName: \"kubernetes.io/projected/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-kube-api-access-6s2xr\") pod \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.769621 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-dispersionconf\") pod \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\" (UID: \"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02\") " Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.769766 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "a9b76963-d66a-43b7-9f1a-ef2a18ef6d02" (UID: "a9b76963-d66a-43b7-9f1a-ef2a18ef6d02"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.770265 4799 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.770294 4799 reconciler_common.go:293] "Volume detached for volume \"pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.771002 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "a9b76963-d66a-43b7-9f1a-ef2a18ef6d02" (UID: "a9b76963-d66a-43b7-9f1a-ef2a18ef6d02"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.774856 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-kube-api-access-6s2xr" (OuterVolumeSpecName: "kube-api-access-6s2xr") pod "a9b76963-d66a-43b7-9f1a-ef2a18ef6d02" (UID: "a9b76963-d66a-43b7-9f1a-ef2a18ef6d02"). InnerVolumeSpecName "kube-api-access-6s2xr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.777799 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "a9b76963-d66a-43b7-9f1a-ef2a18ef6d02" (UID: "a9b76963-d66a-43b7-9f1a-ef2a18ef6d02"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.807380 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "a9b76963-d66a-43b7-9f1a-ef2a18ef6d02" (UID: "a9b76963-d66a-43b7-9f1a-ef2a18ef6d02"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.810101 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-scripts" (OuterVolumeSpecName: "scripts") pod "a9b76963-d66a-43b7-9f1a-ef2a18ef6d02" (UID: "a9b76963-d66a-43b7-9f1a-ef2a18ef6d02"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.818383 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9b76963-d66a-43b7-9f1a-ef2a18ef6d02" (UID: "a9b76963-d66a-43b7-9f1a-ef2a18ef6d02"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.872665 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6s2xr\" (UniqueName: \"kubernetes.io/projected/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-kube-api-access-6s2xr\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.872713 4799 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.872728 4799 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.872738 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.872750 4799 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:17 crc kubenswrapper[4799]: I0121 17:51:17.872761 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9b76963-d66a-43b7-9f1a-ef2a18ef6d02-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.337039 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-72p7x" event={"ID":"a9b76963-d66a-43b7-9f1a-ef2a18ef6d02","Type":"ContainerDied","Data":"2135408d978a51424cfa6a1a820a03cb15e63b85b808cf4f44556097da29c5ff"} Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.337084 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-72p7x" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.337115 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2135408d978a51424cfa6a1a820a03cb15e63b85b808cf4f44556097da29c5ff" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.343283 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c3dbd916-66ac-4f70-a011-68d4195c5c44","Type":"ContainerDied","Data":"9d3e0550623e5faca9b9fa7854b55fba588614d2dbccfbd3cb8345d670ceea7b"} Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.343371 4799 scope.go:117] "RemoveContainer" containerID="9fcf6ce33314351c1e09c5a7bd810145554bb26679d9a852e6f90bf8138fbe4a" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.343396 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.374352 4799 scope.go:117] "RemoveContainer" containerID="bd4c96de985b45b615e4cbe201c49e4a6ca0aff9d074405dd85f9edc5adc331f" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.385915 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.395407 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.400683 4799 scope.go:117] "RemoveContainer" containerID="50ee3bf8e92ed22d4083ba8c0544f4699656bb71f27a8ae12e04c78f3832b804" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.424416 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 21 17:51:18 crc kubenswrapper[4799]: E0121 17:51:18.424815 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c28dcade-7a63-4e0c-988b-1aac42353632" containerName="mariadb-account-create-update" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.424833 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c28dcade-7a63-4e0c-988b-1aac42353632" containerName="mariadb-account-create-update" Jan 21 17:51:18 crc kubenswrapper[4799]: E0121 17:51:18.424849 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9b76963-d66a-43b7-9f1a-ef2a18ef6d02" containerName="swift-ring-rebalance" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.424856 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9b76963-d66a-43b7-9f1a-ef2a18ef6d02" containerName="swift-ring-rebalance" Jan 21 17:51:18 crc kubenswrapper[4799]: E0121 17:51:18.424879 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerName="prometheus" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.424885 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerName="prometheus" Jan 21 17:51:18 crc kubenswrapper[4799]: E0121 17:51:18.424897 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerName="config-reloader" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.424902 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerName="config-reloader" Jan 21 17:51:18 crc kubenswrapper[4799]: E0121 17:51:18.424917 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerName="thanos-sidecar" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.424924 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerName="thanos-sidecar" Jan 21 17:51:18 crc kubenswrapper[4799]: E0121 17:51:18.424932 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerName="init-config-reloader" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.424938 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerName="init-config-reloader" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.425100 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c28dcade-7a63-4e0c-988b-1aac42353632" containerName="mariadb-account-create-update" Jan 21 
17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.425116 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerName="prometheus"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.425143 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerName="config-reloader"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.425156 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3dbd916-66ac-4f70-a011-68d4195c5c44" containerName="thanos-sidecar"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.425166 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9b76963-d66a-43b7-9f1a-ef2a18ef6d02" containerName="swift-ring-rebalance"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.426775 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.427353 4799 scope.go:117] "RemoveContainer" containerID="5f544d8b4e7c301812a7ecb1256ebbdbbadec5f876ac61b8d332b6d8f7b81167"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.435119 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.435210 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.435432 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-1"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.435627 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-2"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.435637 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-8j9xl"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.436435 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.436565 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.436951 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.440633 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.468157 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.586039 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.586154 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.586187 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.586214 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.586343 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7h9pc\" (UniqueName: \"kubernetes.io/projected/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-kube-api-access-7h9pc\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.586367 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.586392 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.586418 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.586456 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.586484 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.586523 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.586555 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-config\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.586579 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.688516 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-config\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.688580 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.688647 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.688697 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.688728 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.688752 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.688829 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7h9pc\" (UniqueName: \"kubernetes.io/projected/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-kube-api-access-7h9pc\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.688854 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.688884 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.688909 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.688950 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.688981 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.689026 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.690413 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.690436 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.690557 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.693369 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.694189 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.694346 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.695689 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.704809 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.704989 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-config\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0"
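[Editor's note] The reconciler_common.go:245 / :218 / operation_generator.go:637 triplets above trace the kubelet volume manager's reconcile loop for openstack/prometheus-metric-storage-0: each declared volume is first verified as attached (VerifyControllerAttachedVolume), then mounted into the pod (MountVolume), and each successful mount is reported as MountVolume.SetUp succeeded. A minimal sketch of that desired-state/actual-state pattern, in Go with illustrative types rather than the kubelet's real API:

package main

import "fmt"

// desiredVolume mirrors one "UniqueName" from the log, e.g.
// "kubernetes.io/secret/<pod-uid>-config". Illustrative only.
type desiredVolume struct {
	uniqueName string
}

// reconcile walks desired-but-not-mounted volumes through the two phases
// visible in the log: verify attachment, then mount (SetUp).
func reconcile(desired []desiredVolume, mounted map[string]bool) {
	for _, v := range desired {
		if mounted[v.uniqueName] {
			continue // actual state already matches desired state
		}
		fmt.Printf("VerifyControllerAttachedVolume started for volume %q\n", v.uniqueName)
		fmt.Printf("MountVolume started for volume %q\n", v.uniqueName)
		mounted[v.uniqueName] = true
		fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", v.uniqueName)
	}
}

func main() {
	desired := []desiredVolume{
		{"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-config"},
		{"kubernetes.io/empty-dir/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-config-out"},
	}
	reconcile(desired, map[string]bool{})
}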
\"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-config\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.705582 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.705605 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.705639 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8cc5b1a9ff3eab274f8795ef882996bdac004679de968d37b59819fb3c1cc7c5/globalmount\"" pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.706690 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.715994 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7h9pc\" (UniqueName: \"kubernetes.io/projected/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-kube-api-access-7h9pc\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.742566 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") pod \"prometheus-metric-storage-0\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:18 crc kubenswrapper[4799]: I0121 17:51:18.748778 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:19 crc kubenswrapper[4799]: I0121 17:51:19.258561 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 21 17:51:19 crc kubenswrapper[4799]: I0121 17:51:19.353523 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb","Type":"ContainerStarted","Data":"85f9b7a8eef993d349d7c1381f2017561667ccb8e633fb5a9ce436d0a05ab278"} Jan 21 17:51:20 crc kubenswrapper[4799]: I0121 17:51:20.215100 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3dbd916-66ac-4f70-a011-68d4195c5c44" path="/var/lib/kubelet/pods/c3dbd916-66ac-4f70-a011-68d4195c5c44/volumes" Jan 21 17:51:21 crc kubenswrapper[4799]: I0121 17:51:21.019881 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="48f0f966-0779-4959-884e-eae4ed66e969" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.106:5671: connect: connection refused" Jan 21 17:51:21 crc kubenswrapper[4799]: I0121 17:51:21.218422 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:51:21 crc kubenswrapper[4799]: I0121 17:51:21.870408 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-notifications-server-0" Jan 21 17:51:22 crc kubenswrapper[4799]: I0121 17:51:22.695837 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-5f84r"] Jan 21 17:51:22 crc kubenswrapper[4799]: I0121 17:51:22.699306 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-5f84r" Jan 21 17:51:22 crc kubenswrapper[4799]: I0121 17:51:22.704996 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-5f84r"] Jan 21 17:51:22 crc kubenswrapper[4799]: I0121 17:51:22.791023 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-665e-account-create-update-h8l8x"] Jan 21 17:51:22 crc kubenswrapper[4799]: I0121 17:51:22.792505 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-665e-account-create-update-h8l8x" Jan 21 17:51:22 crc kubenswrapper[4799]: I0121 17:51:22.795028 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Jan 21 17:51:22 crc kubenswrapper[4799]: I0121 17:51:22.810685 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-665e-account-create-update-h8l8x"] Jan 21 17:51:22 crc kubenswrapper[4799]: I0121 17:51:22.843528 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1f2fc84-eafa-4f58-9b63-ae4037e16786-operator-scripts\") pod \"glance-db-create-5f84r\" (UID: \"f1f2fc84-eafa-4f58-9b63-ae4037e16786\") " pod="openstack/glance-db-create-5f84r" Jan 21 17:51:22 crc kubenswrapper[4799]: I0121 17:51:22.843895 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lndzl\" (UniqueName: \"kubernetes.io/projected/f1f2fc84-eafa-4f58-9b63-ae4037e16786-kube-api-access-lndzl\") pod \"glance-db-create-5f84r\" (UID: \"f1f2fc84-eafa-4f58-9b63-ae4037e16786\") " pod="openstack/glance-db-create-5f84r" Jan 21 17:51:22 crc kubenswrapper[4799]: I0121 17:51:22.945862 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad2dcb68-2e9f-4104-9d7e-591638a67f60-operator-scripts\") pod \"glance-665e-account-create-update-h8l8x\" (UID: \"ad2dcb68-2e9f-4104-9d7e-591638a67f60\") " pod="openstack/glance-665e-account-create-update-h8l8x" Jan 21 17:51:22 crc kubenswrapper[4799]: I0121 17:51:22.945912 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1f2fc84-eafa-4f58-9b63-ae4037e16786-operator-scripts\") pod \"glance-db-create-5f84r\" (UID: \"f1f2fc84-eafa-4f58-9b63-ae4037e16786\") " pod="openstack/glance-db-create-5f84r" Jan 21 17:51:22 crc kubenswrapper[4799]: I0121 17:51:22.946238 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmnp7\" (UniqueName: \"kubernetes.io/projected/ad2dcb68-2e9f-4104-9d7e-591638a67f60-kube-api-access-vmnp7\") pod \"glance-665e-account-create-update-h8l8x\" (UID: \"ad2dcb68-2e9f-4104-9d7e-591638a67f60\") " pod="openstack/glance-665e-account-create-update-h8l8x" Jan 21 17:51:22 crc kubenswrapper[4799]: I0121 17:51:22.946370 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lndzl\" (UniqueName: \"kubernetes.io/projected/f1f2fc84-eafa-4f58-9b63-ae4037e16786-kube-api-access-lndzl\") pod \"glance-db-create-5f84r\" (UID: \"f1f2fc84-eafa-4f58-9b63-ae4037e16786\") " pod="openstack/glance-db-create-5f84r" Jan 21 17:51:22 crc kubenswrapper[4799]: I0121 17:51:22.946650 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1f2fc84-eafa-4f58-9b63-ae4037e16786-operator-scripts\") pod \"glance-db-create-5f84r\" (UID: \"f1f2fc84-eafa-4f58-9b63-ae4037e16786\") " pod="openstack/glance-db-create-5f84r" Jan 21 17:51:22 crc kubenswrapper[4799]: I0121 17:51:22.969523 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lndzl\" (UniqueName: \"kubernetes.io/projected/f1f2fc84-eafa-4f58-9b63-ae4037e16786-kube-api-access-lndzl\") pod \"glance-db-create-5f84r\" (UID: 
\"f1f2fc84-eafa-4f58-9b63-ae4037e16786\") " pod="openstack/glance-db-create-5f84r" Jan 21 17:51:23 crc kubenswrapper[4799]: I0121 17:51:23.023996 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-5f84r" Jan 21 17:51:23 crc kubenswrapper[4799]: I0121 17:51:23.048817 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad2dcb68-2e9f-4104-9d7e-591638a67f60-operator-scripts\") pod \"glance-665e-account-create-update-h8l8x\" (UID: \"ad2dcb68-2e9f-4104-9d7e-591638a67f60\") " pod="openstack/glance-665e-account-create-update-h8l8x" Jan 21 17:51:23 crc kubenswrapper[4799]: I0121 17:51:23.048897 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmnp7\" (UniqueName: \"kubernetes.io/projected/ad2dcb68-2e9f-4104-9d7e-591638a67f60-kube-api-access-vmnp7\") pod \"glance-665e-account-create-update-h8l8x\" (UID: \"ad2dcb68-2e9f-4104-9d7e-591638a67f60\") " pod="openstack/glance-665e-account-create-update-h8l8x" Jan 21 17:51:23 crc kubenswrapper[4799]: I0121 17:51:23.049956 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad2dcb68-2e9f-4104-9d7e-591638a67f60-operator-scripts\") pod \"glance-665e-account-create-update-h8l8x\" (UID: \"ad2dcb68-2e9f-4104-9d7e-591638a67f60\") " pod="openstack/glance-665e-account-create-update-h8l8x" Jan 21 17:51:23 crc kubenswrapper[4799]: I0121 17:51:23.084246 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmnp7\" (UniqueName: \"kubernetes.io/projected/ad2dcb68-2e9f-4104-9d7e-591638a67f60-kube-api-access-vmnp7\") pod \"glance-665e-account-create-update-h8l8x\" (UID: \"ad2dcb68-2e9f-4104-9d7e-591638a67f60\") " pod="openstack/glance-665e-account-create-update-h8l8x" Jan 21 17:51:23 crc kubenswrapper[4799]: I0121 17:51:23.113725 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-665e-account-create-update-h8l8x" Jan 21 17:51:23 crc kubenswrapper[4799]: I0121 17:51:23.390904 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb","Type":"ContainerStarted","Data":"4d8dd5bbca2bb8a38e11eec588bc659f8b91a05c9d99ded1b5c5f728c4a49beb"} Jan 21 17:51:23 crc kubenswrapper[4799]: I0121 17:51:23.464863 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-665e-account-create-update-h8l8x"] Jan 21 17:51:23 crc kubenswrapper[4799]: W0121 17:51:23.467603 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad2dcb68_2e9f_4104_9d7e_591638a67f60.slice/crio-eca78404618f2794b61174613ba6130338defc1519b0f85015c9914312f9207c WatchSource:0}: Error finding container eca78404618f2794b61174613ba6130338defc1519b0f85015c9914312f9207c: Status 404 returned error can't find the container with id eca78404618f2794b61174613ba6130338defc1519b0f85015c9914312f9207c Jan 21 17:51:23 crc kubenswrapper[4799]: I0121 17:51:23.593693 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-5f84r"] Jan 21 17:51:23 crc kubenswrapper[4799]: W0121 17:51:23.596176 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1f2fc84_eafa_4f58_9b63_ae4037e16786.slice/crio-b8b52efe3cabb3bd04feff345b6357229e2ec4978fd9beae9047c09578699915 WatchSource:0}: Error finding container b8b52efe3cabb3bd04feff345b6357229e2ec4978fd9beae9047c09578699915: Status 404 returned error can't find the container with id b8b52efe3cabb3bd04feff345b6357229e2ec4978fd9beae9047c09578699915 Jan 21 17:51:24 crc kubenswrapper[4799]: I0121 17:51:24.400333 4799 generic.go:334] "Generic (PLEG): container finished" podID="f1f2fc84-eafa-4f58-9b63-ae4037e16786" containerID="3f6d4019b3f2b9ff8583ebcde5a8afb203320ca0be9babcd6d86c8d667f41cdb" exitCode=0 Jan 21 17:51:24 crc kubenswrapper[4799]: I0121 17:51:24.400435 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-5f84r" event={"ID":"f1f2fc84-eafa-4f58-9b63-ae4037e16786","Type":"ContainerDied","Data":"3f6d4019b3f2b9ff8583ebcde5a8afb203320ca0be9babcd6d86c8d667f41cdb"} Jan 21 17:51:24 crc kubenswrapper[4799]: I0121 17:51:24.401322 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-5f84r" event={"ID":"f1f2fc84-eafa-4f58-9b63-ae4037e16786","Type":"ContainerStarted","Data":"b8b52efe3cabb3bd04feff345b6357229e2ec4978fd9beae9047c09578699915"} Jan 21 17:51:24 crc kubenswrapper[4799]: I0121 17:51:24.404179 4799 generic.go:334] "Generic (PLEG): container finished" podID="ad2dcb68-2e9f-4104-9d7e-591638a67f60" containerID="54090834d9347604133a6822ab003bcde63d929770baaca2799c25431f01583c" exitCode=0 Jan 21 17:51:24 crc kubenswrapper[4799]: I0121 17:51:24.404267 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-665e-account-create-update-h8l8x" event={"ID":"ad2dcb68-2e9f-4104-9d7e-591638a67f60","Type":"ContainerDied","Data":"54090834d9347604133a6822ab003bcde63d929770baaca2799c25431f01583c"} Jan 21 17:51:24 crc kubenswrapper[4799]: I0121 17:51:24.404342 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-665e-account-create-update-h8l8x" 
event={"ID":"ad2dcb68-2e9f-4104-9d7e-591638a67f60","Type":"ContainerStarted","Data":"eca78404618f2794b61174613ba6130338defc1519b0f85015c9914312f9207c"} Jan 21 17:51:25 crc kubenswrapper[4799]: I0121 17:51:25.783668 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-665e-account-create-update-h8l8x" Jan 21 17:51:25 crc kubenswrapper[4799]: I0121 17:51:25.790554 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-5f84r" Jan 21 17:51:25 crc kubenswrapper[4799]: I0121 17:51:25.905404 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad2dcb68-2e9f-4104-9d7e-591638a67f60-operator-scripts\") pod \"ad2dcb68-2e9f-4104-9d7e-591638a67f60\" (UID: \"ad2dcb68-2e9f-4104-9d7e-591638a67f60\") " Jan 21 17:51:25 crc kubenswrapper[4799]: I0121 17:51:25.905791 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmnp7\" (UniqueName: \"kubernetes.io/projected/ad2dcb68-2e9f-4104-9d7e-591638a67f60-kube-api-access-vmnp7\") pod \"ad2dcb68-2e9f-4104-9d7e-591638a67f60\" (UID: \"ad2dcb68-2e9f-4104-9d7e-591638a67f60\") " Jan 21 17:51:25 crc kubenswrapper[4799]: I0121 17:51:25.905955 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad2dcb68-2e9f-4104-9d7e-591638a67f60-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ad2dcb68-2e9f-4104-9d7e-591638a67f60" (UID: "ad2dcb68-2e9f-4104-9d7e-591638a67f60"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:51:25 crc kubenswrapper[4799]: I0121 17:51:25.905973 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1f2fc84-eafa-4f58-9b63-ae4037e16786-operator-scripts\") pod \"f1f2fc84-eafa-4f58-9b63-ae4037e16786\" (UID: \"f1f2fc84-eafa-4f58-9b63-ae4037e16786\") " Jan 21 17:51:25 crc kubenswrapper[4799]: I0121 17:51:25.906024 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lndzl\" (UniqueName: \"kubernetes.io/projected/f1f2fc84-eafa-4f58-9b63-ae4037e16786-kube-api-access-lndzl\") pod \"f1f2fc84-eafa-4f58-9b63-ae4037e16786\" (UID: \"f1f2fc84-eafa-4f58-9b63-ae4037e16786\") " Jan 21 17:51:25 crc kubenswrapper[4799]: I0121 17:51:25.906392 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1f2fc84-eafa-4f58-9b63-ae4037e16786-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f1f2fc84-eafa-4f58-9b63-ae4037e16786" (UID: "f1f2fc84-eafa-4f58-9b63-ae4037e16786"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:51:25 crc kubenswrapper[4799]: I0121 17:51:25.906754 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1f2fc84-eafa-4f58-9b63-ae4037e16786-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:25 crc kubenswrapper[4799]: I0121 17:51:25.906782 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad2dcb68-2e9f-4104-9d7e-591638a67f60-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:25 crc kubenswrapper[4799]: I0121 17:51:25.910835 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad2dcb68-2e9f-4104-9d7e-591638a67f60-kube-api-access-vmnp7" (OuterVolumeSpecName: "kube-api-access-vmnp7") pod "ad2dcb68-2e9f-4104-9d7e-591638a67f60" (UID: "ad2dcb68-2e9f-4104-9d7e-591638a67f60"). InnerVolumeSpecName "kube-api-access-vmnp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:51:25 crc kubenswrapper[4799]: I0121 17:51:25.911962 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1f2fc84-eafa-4f58-9b63-ae4037e16786-kube-api-access-lndzl" (OuterVolumeSpecName: "kube-api-access-lndzl") pod "f1f2fc84-eafa-4f58-9b63-ae4037e16786" (UID: "f1f2fc84-eafa-4f58-9b63-ae4037e16786"). InnerVolumeSpecName "kube-api-access-lndzl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:51:26 crc kubenswrapper[4799]: I0121 17:51:26.009098 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lndzl\" (UniqueName: \"kubernetes.io/projected/f1f2fc84-eafa-4f58-9b63-ae4037e16786-kube-api-access-lndzl\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:26 crc kubenswrapper[4799]: I0121 17:51:26.009199 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmnp7\" (UniqueName: \"kubernetes.io/projected/ad2dcb68-2e9f-4104-9d7e-591638a67f60-kube-api-access-vmnp7\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:26 crc kubenswrapper[4799]: I0121 17:51:26.429905 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-665e-account-create-update-h8l8x" event={"ID":"ad2dcb68-2e9f-4104-9d7e-591638a67f60","Type":"ContainerDied","Data":"eca78404618f2794b61174613ba6130338defc1519b0f85015c9914312f9207c"} Jan 21 17:51:26 crc kubenswrapper[4799]: I0121 17:51:26.430003 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eca78404618f2794b61174613ba6130338defc1519b0f85015c9914312f9207c" Jan 21 17:51:26 crc kubenswrapper[4799]: I0121 17:51:26.430225 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-665e-account-create-update-h8l8x" Jan 21 17:51:26 crc kubenswrapper[4799]: I0121 17:51:26.434393 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-5f84r" event={"ID":"f1f2fc84-eafa-4f58-9b63-ae4037e16786","Type":"ContainerDied","Data":"b8b52efe3cabb3bd04feff345b6357229e2ec4978fd9beae9047c09578699915"} Jan 21 17:51:26 crc kubenswrapper[4799]: I0121 17:51:26.434444 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8b52efe3cabb3bd04feff345b6357229e2ec4978fd9beae9047c09578699915" Jan 21 17:51:26 crc kubenswrapper[4799]: I0121 17:51:26.434451 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-5f84r" Jan 21 17:51:27 crc kubenswrapper[4799]: I0121 17:51:27.955150 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-74m2t"] Jan 21 17:51:27 crc kubenswrapper[4799]: E0121 17:51:27.956085 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad2dcb68-2e9f-4104-9d7e-591638a67f60" containerName="mariadb-account-create-update" Jan 21 17:51:27 crc kubenswrapper[4799]: I0121 17:51:27.956146 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad2dcb68-2e9f-4104-9d7e-591638a67f60" containerName="mariadb-account-create-update" Jan 21 17:51:27 crc kubenswrapper[4799]: E0121 17:51:27.956178 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1f2fc84-eafa-4f58-9b63-ae4037e16786" containerName="mariadb-database-create" Jan 21 17:51:27 crc kubenswrapper[4799]: I0121 17:51:27.956189 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1f2fc84-eafa-4f58-9b63-ae4037e16786" containerName="mariadb-database-create" Jan 21 17:51:27 crc kubenswrapper[4799]: I0121 17:51:27.956494 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1f2fc84-eafa-4f58-9b63-ae4037e16786" containerName="mariadb-database-create" Jan 21 17:51:27 crc kubenswrapper[4799]: I0121 17:51:27.956517 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad2dcb68-2e9f-4104-9d7e-591638a67f60" containerName="mariadb-account-create-update" Jan 21 17:51:27 crc kubenswrapper[4799]: I0121 17:51:27.957632 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-74m2t" Jan 21 17:51:27 crc kubenswrapper[4799]: I0121 17:51:27.961398 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Jan 21 17:51:27 crc kubenswrapper[4799]: I0121 17:51:27.961784 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-hhc5h" Jan 21 17:51:27 crc kubenswrapper[4799]: I0121 17:51:27.976825 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-74m2t"] Jan 21 17:51:28 crc kubenswrapper[4799]: I0121 17:51:28.054863 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-config-data\") pod \"glance-db-sync-74m2t\" (UID: \"482b08ae-060f-465a-9085-20d742c22a13\") " pod="openstack/glance-db-sync-74m2t" Jan 21 17:51:28 crc kubenswrapper[4799]: I0121 17:51:28.054930 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-db-sync-config-data\") pod \"glance-db-sync-74m2t\" (UID: \"482b08ae-060f-465a-9085-20d742c22a13\") " pod="openstack/glance-db-sync-74m2t" Jan 21 17:51:28 crc kubenswrapper[4799]: I0121 17:51:28.054958 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gbpn\" (UniqueName: \"kubernetes.io/projected/482b08ae-060f-465a-9085-20d742c22a13-kube-api-access-6gbpn\") pod \"glance-db-sync-74m2t\" (UID: \"482b08ae-060f-465a-9085-20d742c22a13\") " pod="openstack/glance-db-sync-74m2t" Jan 21 17:51:28 crc kubenswrapper[4799]: I0121 17:51:28.055036 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-combined-ca-bundle\") pod \"glance-db-sync-74m2t\" (UID: \"482b08ae-060f-465a-9085-20d742c22a13\") " pod="openstack/glance-db-sync-74m2t" Jan 21 17:51:28 crc kubenswrapper[4799]: I0121 17:51:28.156966 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-config-data\") pod \"glance-db-sync-74m2t\" (UID: \"482b08ae-060f-465a-9085-20d742c22a13\") " pod="openstack/glance-db-sync-74m2t" Jan 21 17:51:28 crc kubenswrapper[4799]: I0121 17:51:28.157055 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-db-sync-config-data\") pod \"glance-db-sync-74m2t\" (UID: \"482b08ae-060f-465a-9085-20d742c22a13\") " pod="openstack/glance-db-sync-74m2t" Jan 21 17:51:28 crc kubenswrapper[4799]: I0121 17:51:28.157093 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gbpn\" (UniqueName: \"kubernetes.io/projected/482b08ae-060f-465a-9085-20d742c22a13-kube-api-access-6gbpn\") pod \"glance-db-sync-74m2t\" (UID: \"482b08ae-060f-465a-9085-20d742c22a13\") " pod="openstack/glance-db-sync-74m2t" Jan 21 17:51:28 crc kubenswrapper[4799]: I0121 17:51:28.157206 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-combined-ca-bundle\") pod \"glance-db-sync-74m2t\" (UID: \"482b08ae-060f-465a-9085-20d742c22a13\") " pod="openstack/glance-db-sync-74m2t" Jan 21 17:51:28 crc kubenswrapper[4799]: I0121 17:51:28.163903 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-config-data\") pod \"glance-db-sync-74m2t\" (UID: \"482b08ae-060f-465a-9085-20d742c22a13\") " pod="openstack/glance-db-sync-74m2t" Jan 21 17:51:28 crc kubenswrapper[4799]: I0121 17:51:28.165708 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-db-sync-config-data\") pod \"glance-db-sync-74m2t\" (UID: \"482b08ae-060f-465a-9085-20d742c22a13\") " pod="openstack/glance-db-sync-74m2t" Jan 21 17:51:28 crc kubenswrapper[4799]: I0121 17:51:28.168075 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-combined-ca-bundle\") pod \"glance-db-sync-74m2t\" (UID: \"482b08ae-060f-465a-9085-20d742c22a13\") " pod="openstack/glance-db-sync-74m2t" Jan 21 17:51:28 crc kubenswrapper[4799]: I0121 17:51:28.177505 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gbpn\" (UniqueName: \"kubernetes.io/projected/482b08ae-060f-465a-9085-20d742c22a13-kube-api-access-6gbpn\") pod \"glance-db-sync-74m2t\" (UID: \"482b08ae-060f-465a-9085-20d742c22a13\") " pod="openstack/glance-db-sync-74m2t" Jan 21 17:51:28 crc kubenswrapper[4799]: I0121 17:51:28.315976 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-74m2t" Jan 21 17:51:29 crc kubenswrapper[4799]: I0121 17:51:28.882300 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-74m2t"] Jan 21 17:51:29 crc kubenswrapper[4799]: I0121 17:51:29.474381 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-74m2t" event={"ID":"482b08ae-060f-465a-9085-20d742c22a13","Type":"ContainerStarted","Data":"c4c9980e58390a4f0beb925be2c90ea7f02e24562f01b3d94ad8d70f1ea3d57c"} Jan 21 17:51:30 crc kubenswrapper[4799]: I0121 17:51:30.486636 4799 generic.go:334] "Generic (PLEG): container finished" podID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerID="4d8dd5bbca2bb8a38e11eec588bc659f8b91a05c9d99ded1b5c5f728c4a49beb" exitCode=0 Jan 21 17:51:30 crc kubenswrapper[4799]: I0121 17:51:30.486688 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb","Type":"ContainerDied","Data":"4d8dd5bbca2bb8a38e11eec588bc659f8b91a05c9d99ded1b5c5f728c4a49beb"} Jan 21 17:51:30 crc kubenswrapper[4799]: I0121 17:51:30.938340 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.373236 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-t49pc"] Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.374983 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-t49pc" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.393237 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-t49pc"] Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.404183 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-c43d-account-create-update-c7d49"] Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.405670 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-c43d-account-create-update-c7d49" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.408269 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.412962 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-c43d-account-create-update-c7d49"] Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.508513 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb","Type":"ContainerStarted","Data":"62b90ea328d32659a6d409e4ad624ce3599c941829209ec8c107a40282ee0291"} Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.534776 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-h9cvz"] Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.536183 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-h9cvz" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.545326 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2hfj\" (UniqueName: \"kubernetes.io/projected/74829fa6-f20b-437c-9a76-e336eeb52598-kube-api-access-v2hfj\") pod \"barbican-db-create-t49pc\" (UID: \"74829fa6-f20b-437c-9a76-e336eeb52598\") " pod="openstack/barbican-db-create-t49pc" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.545571 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74829fa6-f20b-437c-9a76-e336eeb52598-operator-scripts\") pod \"barbican-db-create-t49pc\" (UID: \"74829fa6-f20b-437c-9a76-e336eeb52598\") " pod="openstack/barbican-db-create-t49pc" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.545684 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xk82d\" (UniqueName: \"kubernetes.io/projected/237ef136-a48e-462a-b261-c7f2e386a15e-kube-api-access-xk82d\") pod \"cinder-c43d-account-create-update-c7d49\" (UID: \"237ef136-a48e-462a-b261-c7f2e386a15e\") " pod="openstack/cinder-c43d-account-create-update-c7d49" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.545855 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/237ef136-a48e-462a-b261-c7f2e386a15e-operator-scripts\") pod \"cinder-c43d-account-create-update-c7d49\" (UID: \"237ef136-a48e-462a-b261-c7f2e386a15e\") " pod="openstack/cinder-c43d-account-create-update-c7d49" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.550972 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-h9cvz"] Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.563959 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-sync-6zwqt"] Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.565820 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-6zwqt" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.569960 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-4zzl8" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.580733 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-config-data" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.605456 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-6zwqt"] Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.625534 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-40ee-account-create-update-76684"] Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.627356 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-40ee-account-create-update-76684" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.631857 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.642935 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-40ee-account-create-update-76684"] Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.647226 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/237ef136-a48e-462a-b261-c7f2e386a15e-operator-scripts\") pod \"cinder-c43d-account-create-update-c7d49\" (UID: \"237ef136-a48e-462a-b261-c7f2e386a15e\") " pod="openstack/cinder-c43d-account-create-update-c7d49" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.647317 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-config-data\") pod \"watcher-db-sync-6zwqt\" (UID: \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\") " pod="openstack/watcher-db-sync-6zwqt" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.647394 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqmh4\" (UniqueName: \"kubernetes.io/projected/900a87e2-8f11-4a39-8b54-59283d6fc6c2-kube-api-access-xqmh4\") pod \"cinder-db-create-h9cvz\" (UID: \"900a87e2-8f11-4a39-8b54-59283d6fc6c2\") " pod="openstack/cinder-db-create-h9cvz" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.647438 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-combined-ca-bundle\") pod \"watcher-db-sync-6zwqt\" (UID: \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\") " pod="openstack/watcher-db-sync-6zwqt" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.647463 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/900a87e2-8f11-4a39-8b54-59283d6fc6c2-operator-scripts\") pod \"cinder-db-create-h9cvz\" (UID: \"900a87e2-8f11-4a39-8b54-59283d6fc6c2\") " pod="openstack/cinder-db-create-h9cvz" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.647485 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2hfj\" (UniqueName: \"kubernetes.io/projected/74829fa6-f20b-437c-9a76-e336eeb52598-kube-api-access-v2hfj\") pod \"barbican-db-create-t49pc\" (UID: \"74829fa6-f20b-437c-9a76-e336eeb52598\") " pod="openstack/barbican-db-create-t49pc" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.647579 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-db-sync-config-data\") pod \"watcher-db-sync-6zwqt\" (UID: \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\") " pod="openstack/watcher-db-sync-6zwqt" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.647643 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74829fa6-f20b-437c-9a76-e336eeb52598-operator-scripts\") pod \"barbican-db-create-t49pc\" (UID: 
\"74829fa6-f20b-437c-9a76-e336eeb52598\") " pod="openstack/barbican-db-create-t49pc" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.647695 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xk82d\" (UniqueName: \"kubernetes.io/projected/237ef136-a48e-462a-b261-c7f2e386a15e-kube-api-access-xk82d\") pod \"cinder-c43d-account-create-update-c7d49\" (UID: \"237ef136-a48e-462a-b261-c7f2e386a15e\") " pod="openstack/cinder-c43d-account-create-update-c7d49" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.647750 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pr8b\" (UniqueName: \"kubernetes.io/projected/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-kube-api-access-9pr8b\") pod \"watcher-db-sync-6zwqt\" (UID: \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\") " pod="openstack/watcher-db-sync-6zwqt" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.648593 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74829fa6-f20b-437c-9a76-e336eeb52598-operator-scripts\") pod \"barbican-db-create-t49pc\" (UID: \"74829fa6-f20b-437c-9a76-e336eeb52598\") " pod="openstack/barbican-db-create-t49pc" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.648600 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/237ef136-a48e-462a-b261-c7f2e386a15e-operator-scripts\") pod \"cinder-c43d-account-create-update-c7d49\" (UID: \"237ef136-a48e-462a-b261-c7f2e386a15e\") " pod="openstack/cinder-c43d-account-create-update-c7d49" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.668013 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2hfj\" (UniqueName: \"kubernetes.io/projected/74829fa6-f20b-437c-9a76-e336eeb52598-kube-api-access-v2hfj\") pod \"barbican-db-create-t49pc\" (UID: \"74829fa6-f20b-437c-9a76-e336eeb52598\") " pod="openstack/barbican-db-create-t49pc" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.686750 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xk82d\" (UniqueName: \"kubernetes.io/projected/237ef136-a48e-462a-b261-c7f2e386a15e-kube-api-access-xk82d\") pod \"cinder-c43d-account-create-update-c7d49\" (UID: \"237ef136-a48e-462a-b261-c7f2e386a15e\") " pod="openstack/cinder-c43d-account-create-update-c7d49" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.691666 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-t49pc" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.738595 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-c43d-account-create-update-c7d49" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.750627 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nz79\" (UniqueName: \"kubernetes.io/projected/b91e7f5a-8ad9-4965-8839-dc43dcfbaada-kube-api-access-8nz79\") pod \"barbican-40ee-account-create-update-76684\" (UID: \"b91e7f5a-8ad9-4965-8839-dc43dcfbaada\") " pod="openstack/barbican-40ee-account-create-update-76684" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.750694 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqmh4\" (UniqueName: \"kubernetes.io/projected/900a87e2-8f11-4a39-8b54-59283d6fc6c2-kube-api-access-xqmh4\") pod \"cinder-db-create-h9cvz\" (UID: \"900a87e2-8f11-4a39-8b54-59283d6fc6c2\") " pod="openstack/cinder-db-create-h9cvz" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.750741 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-combined-ca-bundle\") pod \"watcher-db-sync-6zwqt\" (UID: \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\") " pod="openstack/watcher-db-sync-6zwqt" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.750769 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/900a87e2-8f11-4a39-8b54-59283d6fc6c2-operator-scripts\") pod \"cinder-db-create-h9cvz\" (UID: \"900a87e2-8f11-4a39-8b54-59283d6fc6c2\") " pod="openstack/cinder-db-create-h9cvz" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.750787 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b91e7f5a-8ad9-4965-8839-dc43dcfbaada-operator-scripts\") pod \"barbican-40ee-account-create-update-76684\" (UID: \"b91e7f5a-8ad9-4965-8839-dc43dcfbaada\") " pod="openstack/barbican-40ee-account-create-update-76684" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.750816 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-db-sync-config-data\") pod \"watcher-db-sync-6zwqt\" (UID: \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\") " pod="openstack/watcher-db-sync-6zwqt" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.750848 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pr8b\" (UniqueName: \"kubernetes.io/projected/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-kube-api-access-9pr8b\") pod \"watcher-db-sync-6zwqt\" (UID: \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\") " pod="openstack/watcher-db-sync-6zwqt" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.750916 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-config-data\") pod \"watcher-db-sync-6zwqt\" (UID: \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\") " pod="openstack/watcher-db-sync-6zwqt" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.752338 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/900a87e2-8f11-4a39-8b54-59283d6fc6c2-operator-scripts\") pod \"cinder-db-create-h9cvz\" (UID: 
\"900a87e2-8f11-4a39-8b54-59283d6fc6c2\") " pod="openstack/cinder-db-create-h9cvz" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.759887 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-db-sync-config-data\") pod \"watcher-db-sync-6zwqt\" (UID: \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\") " pod="openstack/watcher-db-sync-6zwqt" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.760095 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-combined-ca-bundle\") pod \"watcher-db-sync-6zwqt\" (UID: \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\") " pod="openstack/watcher-db-sync-6zwqt" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.763255 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-config-data\") pod \"watcher-db-sync-6zwqt\" (UID: \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\") " pod="openstack/watcher-db-sync-6zwqt" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.772740 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-vlrpv"] Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.774543 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-vlrpv" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.778839 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.779299 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.779420 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-phg79" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.779478 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.791342 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqmh4\" (UniqueName: \"kubernetes.io/projected/900a87e2-8f11-4a39-8b54-59283d6fc6c2-kube-api-access-xqmh4\") pod \"cinder-db-create-h9cvz\" (UID: \"900a87e2-8f11-4a39-8b54-59283d6fc6c2\") " pod="openstack/cinder-db-create-h9cvz" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.798501 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pr8b\" (UniqueName: \"kubernetes.io/projected/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-kube-api-access-9pr8b\") pod \"watcher-db-sync-6zwqt\" (UID: \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\") " pod="openstack/watcher-db-sync-6zwqt" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.866017 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-h9cvz" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.867356 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-combined-ca-bundle\") pod \"keystone-db-sync-vlrpv\" (UID: \"112b4dcd-ad4f-40da-9ec8-27bf53f989a8\") " pod="openstack/keystone-db-sync-vlrpv" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.867402 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-config-data\") pod \"keystone-db-sync-vlrpv\" (UID: \"112b4dcd-ad4f-40da-9ec8-27bf53f989a8\") " pod="openstack/keystone-db-sync-vlrpv" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.867429 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nz79\" (UniqueName: \"kubernetes.io/projected/b91e7f5a-8ad9-4965-8839-dc43dcfbaada-kube-api-access-8nz79\") pod \"barbican-40ee-account-create-update-76684\" (UID: \"b91e7f5a-8ad9-4965-8839-dc43dcfbaada\") " pod="openstack/barbican-40ee-account-create-update-76684" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.867471 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xbbb\" (UniqueName: \"kubernetes.io/projected/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-kube-api-access-4xbbb\") pod \"keystone-db-sync-vlrpv\" (UID: \"112b4dcd-ad4f-40da-9ec8-27bf53f989a8\") " pod="openstack/keystone-db-sync-vlrpv" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.867518 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b91e7f5a-8ad9-4965-8839-dc43dcfbaada-operator-scripts\") pod \"barbican-40ee-account-create-update-76684\" (UID: \"b91e7f5a-8ad9-4965-8839-dc43dcfbaada\") " pod="openstack/barbican-40ee-account-create-update-76684" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.878513 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b91e7f5a-8ad9-4965-8839-dc43dcfbaada-operator-scripts\") pod \"barbican-40ee-account-create-update-76684\" (UID: \"b91e7f5a-8ad9-4965-8839-dc43dcfbaada\") " pod="openstack/barbican-40ee-account-create-update-76684" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.880852 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-hzwxh"] Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.882432 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-hzwxh" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.900062 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-6zwqt" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.903932 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nz79\" (UniqueName: \"kubernetes.io/projected/b91e7f5a-8ad9-4965-8839-dc43dcfbaada-kube-api-access-8nz79\") pod \"barbican-40ee-account-create-update-76684\" (UID: \"b91e7f5a-8ad9-4965-8839-dc43dcfbaada\") " pod="openstack/barbican-40ee-account-create-update-76684" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.916201 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-vlrpv"] Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.923259 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-8290-account-create-update-tqxrl"] Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.925814 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8290-account-create-update-tqxrl" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.928369 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.931378 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-hzwxh"] Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.941935 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-40ee-account-create-update-76684" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.956974 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8290-account-create-update-tqxrl"] Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.971148 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-config-data\") pod \"keystone-db-sync-vlrpv\" (UID: \"112b4dcd-ad4f-40da-9ec8-27bf53f989a8\") " pod="openstack/keystone-db-sync-vlrpv" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.972063 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xbbb\" (UniqueName: \"kubernetes.io/projected/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-kube-api-access-4xbbb\") pod \"keystone-db-sync-vlrpv\" (UID: \"112b4dcd-ad4f-40da-9ec8-27bf53f989a8\") " pod="openstack/keystone-db-sync-vlrpv" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.972551 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-combined-ca-bundle\") pod \"keystone-db-sync-vlrpv\" (UID: \"112b4dcd-ad4f-40da-9ec8-27bf53f989a8\") " pod="openstack/keystone-db-sync-vlrpv" Jan 21 17:51:31 crc kubenswrapper[4799]: I0121 17:51:31.978804 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-config-data\") pod \"keystone-db-sync-vlrpv\" (UID: \"112b4dcd-ad4f-40da-9ec8-27bf53f989a8\") " pod="openstack/keystone-db-sync-vlrpv" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:31.999995 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-combined-ca-bundle\") pod \"keystone-db-sync-vlrpv\" (UID: \"112b4dcd-ad4f-40da-9ec8-27bf53f989a8\") " 
pod="openstack/keystone-db-sync-vlrpv" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.020989 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xbbb\" (UniqueName: \"kubernetes.io/projected/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-kube-api-access-4xbbb\") pod \"keystone-db-sync-vlrpv\" (UID: \"112b4dcd-ad4f-40da-9ec8-27bf53f989a8\") " pod="openstack/keystone-db-sync-vlrpv" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.075969 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqnf8\" (UniqueName: \"kubernetes.io/projected/95c5771e-7cc6-4529-a3fb-f8568b69a74a-kube-api-access-xqnf8\") pod \"neutron-db-create-hzwxh\" (UID: \"95c5771e-7cc6-4529-a3fb-f8568b69a74a\") " pod="openstack/neutron-db-create-hzwxh" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.076065 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85b36cad-2226-43fc-840c-1b44fa673bcb-operator-scripts\") pod \"neutron-8290-account-create-update-tqxrl\" (UID: \"85b36cad-2226-43fc-840c-1b44fa673bcb\") " pod="openstack/neutron-8290-account-create-update-tqxrl" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.076158 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnv24\" (UniqueName: \"kubernetes.io/projected/85b36cad-2226-43fc-840c-1b44fa673bcb-kube-api-access-qnv24\") pod \"neutron-8290-account-create-update-tqxrl\" (UID: \"85b36cad-2226-43fc-840c-1b44fa673bcb\") " pod="openstack/neutron-8290-account-create-update-tqxrl" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.076190 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95c5771e-7cc6-4529-a3fb-f8568b69a74a-operator-scripts\") pod \"neutron-db-create-hzwxh\" (UID: \"95c5771e-7cc6-4529-a3fb-f8568b69a74a\") " pod="openstack/neutron-db-create-hzwxh" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.179310 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85b36cad-2226-43fc-840c-1b44fa673bcb-operator-scripts\") pod \"neutron-8290-account-create-update-tqxrl\" (UID: \"85b36cad-2226-43fc-840c-1b44fa673bcb\") " pod="openstack/neutron-8290-account-create-update-tqxrl" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.179571 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnv24\" (UniqueName: \"kubernetes.io/projected/85b36cad-2226-43fc-840c-1b44fa673bcb-kube-api-access-qnv24\") pod \"neutron-8290-account-create-update-tqxrl\" (UID: \"85b36cad-2226-43fc-840c-1b44fa673bcb\") " pod="openstack/neutron-8290-account-create-update-tqxrl" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.179644 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95c5771e-7cc6-4529-a3fb-f8568b69a74a-operator-scripts\") pod \"neutron-db-create-hzwxh\" (UID: \"95c5771e-7cc6-4529-a3fb-f8568b69a74a\") " pod="openstack/neutron-db-create-hzwxh" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.179752 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqnf8\" (UniqueName: 
\"kubernetes.io/projected/95c5771e-7cc6-4529-a3fb-f8568b69a74a-kube-api-access-xqnf8\") pod \"neutron-db-create-hzwxh\" (UID: \"95c5771e-7cc6-4529-a3fb-f8568b69a74a\") " pod="openstack/neutron-db-create-hzwxh" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.184434 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95c5771e-7cc6-4529-a3fb-f8568b69a74a-operator-scripts\") pod \"neutron-db-create-hzwxh\" (UID: \"95c5771e-7cc6-4529-a3fb-f8568b69a74a\") " pod="openstack/neutron-db-create-hzwxh" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.189480 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85b36cad-2226-43fc-840c-1b44fa673bcb-operator-scripts\") pod \"neutron-8290-account-create-update-tqxrl\" (UID: \"85b36cad-2226-43fc-840c-1b44fa673bcb\") " pod="openstack/neutron-8290-account-create-update-tqxrl" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.208665 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-vlrpv" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.289736 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqnf8\" (UniqueName: \"kubernetes.io/projected/95c5771e-7cc6-4529-a3fb-f8568b69a74a-kube-api-access-xqnf8\") pod \"neutron-db-create-hzwxh\" (UID: \"95c5771e-7cc6-4529-a3fb-f8568b69a74a\") " pod="openstack/neutron-db-create-hzwxh" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.293093 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnv24\" (UniqueName: \"kubernetes.io/projected/85b36cad-2226-43fc-840c-1b44fa673bcb-kube-api-access-qnv24\") pod \"neutron-8290-account-create-update-tqxrl\" (UID: \"85b36cad-2226-43fc-840c-1b44fa673bcb\") " pod="openstack/neutron-8290-account-create-update-tqxrl" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.405217 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-t49pc"] Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.501403 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-c43d-account-create-update-c7d49"] Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.528208 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-hzwxh" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.550176 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-8290-account-create-update-tqxrl" Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.571841 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-40ee-account-create-update-76684"] Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.591246 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-6zwqt"] Jan 21 17:51:32 crc kubenswrapper[4799]: W0121 17:51:32.603512 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod237ef136_a48e_462a_b261_c7f2e386a15e.slice/crio-7d94492e42ecff0543a77bb6201f97e34531b89858ebf6716687dbbc43367356 WatchSource:0}: Error finding container 7d94492e42ecff0543a77bb6201f97e34531b89858ebf6716687dbbc43367356: Status 404 returned error can't find the container with id 7d94492e42ecff0543a77bb6201f97e34531b89858ebf6716687dbbc43367356 Jan 21 17:51:32 crc kubenswrapper[4799]: W0121 17:51:32.604803 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74829fa6_f20b_437c_9a76_e336eeb52598.slice/crio-b0e6739f9b0129a8fe2490bc49427856efe8a6cf8ae2eeacc536e89c5848c592 WatchSource:0}: Error finding container b0e6739f9b0129a8fe2490bc49427856efe8a6cf8ae2eeacc536e89c5848c592: Status 404 returned error can't find the container with id b0e6739f9b0129a8fe2490bc49427856efe8a6cf8ae2eeacc536e89c5848c592 Jan 21 17:51:32 crc kubenswrapper[4799]: W0121 17:51:32.608190 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb91e7f5a_8ad9_4965_8839_dc43dcfbaada.slice/crio-e9faebb107b76a34b37da24cc9457fb829d8b0a92e5c36cc04101a413378120b WatchSource:0}: Error finding container e9faebb107b76a34b37da24cc9457fb829d8b0a92e5c36cc04101a413378120b: Status 404 returned error can't find the container with id e9faebb107b76a34b37da24cc9457fb829d8b0a92e5c36cc04101a413378120b Jan 21 17:51:32 crc kubenswrapper[4799]: I0121 17:51:32.714697 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-h9cvz"] Jan 21 17:51:33 crc kubenswrapper[4799]: I0121 17:51:33.085595 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-vlrpv"] Jan 21 17:51:33 crc kubenswrapper[4799]: W0121 17:51:33.201552 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod112b4dcd_ad4f_40da_9ec8_27bf53f989a8.slice/crio-b75f521e0fbbfb0b7faddb7c26fec6212fe7b3f9a32e30a8ac34d767185b39c1 WatchSource:0}: Error finding container b75f521e0fbbfb0b7faddb7c26fec6212fe7b3f9a32e30a8ac34d767185b39c1: Status 404 returned error can't find the container with id b75f521e0fbbfb0b7faddb7c26fec6212fe7b3f9a32e30a8ac34d767185b39c1 Jan 21 17:51:33 crc kubenswrapper[4799]: I0121 17:51:33.290560 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-hzwxh"] Jan 21 17:51:33 crc kubenswrapper[4799]: W0121 17:51:33.297539 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95c5771e_7cc6_4529_a3fb_f8568b69a74a.slice/crio-6b520e8329732c48dab0de2d1dfd8996101a501fdac946a58df965ed0b27f153 WatchSource:0}: Error finding container 6b520e8329732c48dab0de2d1dfd8996101a501fdac946a58df965ed0b27f153: Status 404 returned error can't find the container with id 
Jan 21 17:51:33 crc kubenswrapper[4799]: I0121 17:51:33.364714 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8290-account-create-update-tqxrl"]
Jan 21 17:51:33 crc kubenswrapper[4799]: W0121 17:51:33.365321 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85b36cad_2226_43fc_840c_1b44fa673bcb.slice/crio-7fff1694ff96ce4b40971c241e63dd1787bcaed5ba1abeef99074114333e199f WatchSource:0}: Error finding container 7fff1694ff96ce4b40971c241e63dd1787bcaed5ba1abeef99074114333e199f: Status 404 returned error can't find the container with id 7fff1694ff96ce4b40971c241e63dd1787bcaed5ba1abeef99074114333e199f
Jan 21 17:51:33 crc kubenswrapper[4799]: I0121 17:51:33.532771 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-40ee-account-create-update-76684" event={"ID":"b91e7f5a-8ad9-4965-8839-dc43dcfbaada","Type":"ContainerStarted","Data":"e9faebb107b76a34b37da24cc9457fb829d8b0a92e5c36cc04101a413378120b"}
Jan 21 17:51:33 crc kubenswrapper[4799]: I0121 17:51:33.534176 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-6zwqt" event={"ID":"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd","Type":"ContainerStarted","Data":"284e0f9598bf17a2ff5c8eabb7d3b8a6f95b84df2c8b707060dfb399b1234857"}
Jan 21 17:51:33 crc kubenswrapper[4799]: I0121 17:51:33.535712 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8290-account-create-update-tqxrl" event={"ID":"85b36cad-2226-43fc-840c-1b44fa673bcb","Type":"ContainerStarted","Data":"7fff1694ff96ce4b40971c241e63dd1787bcaed5ba1abeef99074114333e199f"}
Jan 21 17:51:33 crc kubenswrapper[4799]: I0121 17:51:33.537006 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-hzwxh" event={"ID":"95c5771e-7cc6-4529-a3fb-f8568b69a74a","Type":"ContainerStarted","Data":"6b520e8329732c48dab0de2d1dfd8996101a501fdac946a58df965ed0b27f153"}
Jan 21 17:51:33 crc kubenswrapper[4799]: I0121 17:51:33.538266 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-t49pc" event={"ID":"74829fa6-f20b-437c-9a76-e336eeb52598","Type":"ContainerStarted","Data":"b0e6739f9b0129a8fe2490bc49427856efe8a6cf8ae2eeacc536e89c5848c592"}
Jan 21 17:51:33 crc kubenswrapper[4799]: I0121 17:51:33.539285 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vlrpv" event={"ID":"112b4dcd-ad4f-40da-9ec8-27bf53f989a8","Type":"ContainerStarted","Data":"b75f521e0fbbfb0b7faddb7c26fec6212fe7b3f9a32e30a8ac34d767185b39c1"}
Jan 21 17:51:33 crc kubenswrapper[4799]: I0121 17:51:33.541836 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-h9cvz" event={"ID":"900a87e2-8f11-4a39-8b54-59283d6fc6c2","Type":"ContainerStarted","Data":"387a47d717b83b1b72c99ff27577df30a2b981cdf5f0de92e0dadbce3ad074fc"}
Jan 21 17:51:33 crc kubenswrapper[4799]: I0121 17:51:33.543403 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c43d-account-create-update-c7d49" event={"ID":"237ef136-a48e-462a-b261-c7f2e386a15e","Type":"ContainerStarted","Data":"7d94492e42ecff0543a77bb6201f97e34531b89858ebf6716687dbbc43367356"}
Jan 21 17:51:34 crc kubenswrapper[4799]: I0121 17:51:34.562564 4799 generic.go:334] "Generic (PLEG): container finished" podID="237ef136-a48e-462a-b261-c7f2e386a15e" containerID="96ae220119a2db24f19db09016369e25e284599bf2c06418e794ce843dcdf43e" exitCode=0
Jan 21 17:51:34 crc kubenswrapper[4799]: I0121 17:51:34.562667 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c43d-account-create-update-c7d49" event={"ID":"237ef136-a48e-462a-b261-c7f2e386a15e","Type":"ContainerDied","Data":"96ae220119a2db24f19db09016369e25e284599bf2c06418e794ce843dcdf43e"}
Jan 21 17:51:34 crc kubenswrapper[4799]: I0121 17:51:34.565705 4799 generic.go:334] "Generic (PLEG): container finished" podID="b91e7f5a-8ad9-4965-8839-dc43dcfbaada" containerID="ef5851f3212dc38d3c345d96297e4ee5f40779ad626080f7c5b340a827b84051" exitCode=0
Jan 21 17:51:34 crc kubenswrapper[4799]: I0121 17:51:34.565853 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-40ee-account-create-update-76684" event={"ID":"b91e7f5a-8ad9-4965-8839-dc43dcfbaada","Type":"ContainerDied","Data":"ef5851f3212dc38d3c345d96297e4ee5f40779ad626080f7c5b340a827b84051"}
Jan 21 17:51:34 crc kubenswrapper[4799]: I0121 17:51:34.575421 4799 generic.go:334] "Generic (PLEG): container finished" podID="85b36cad-2226-43fc-840c-1b44fa673bcb" containerID="6a08c663c40d53231c4a0d4cbaddf88e051773b48c26f19eaae4d2e08d2000f7" exitCode=0
Jan 21 17:51:34 crc kubenswrapper[4799]: I0121 17:51:34.575495 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8290-account-create-update-tqxrl" event={"ID":"85b36cad-2226-43fc-840c-1b44fa673bcb","Type":"ContainerDied","Data":"6a08c663c40d53231c4a0d4cbaddf88e051773b48c26f19eaae4d2e08d2000f7"}
Jan 21 17:51:34 crc kubenswrapper[4799]: I0121 17:51:34.580382 4799 generic.go:334] "Generic (PLEG): container finished" podID="95c5771e-7cc6-4529-a3fb-f8568b69a74a" containerID="bf1d4dd3499e3170b73ed69258aebf4787d5264aa5f62904658bf6f327e6bf56" exitCode=0
Jan 21 17:51:34 crc kubenswrapper[4799]: I0121 17:51:34.580453 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-hzwxh" event={"ID":"95c5771e-7cc6-4529-a3fb-f8568b69a74a","Type":"ContainerDied","Data":"bf1d4dd3499e3170b73ed69258aebf4787d5264aa5f62904658bf6f327e6bf56"}
Jan 21 17:51:34 crc kubenswrapper[4799]: I0121 17:51:34.600528 4799 generic.go:334] "Generic (PLEG): container finished" podID="74829fa6-f20b-437c-9a76-e336eeb52598" containerID="23baeb6ec3fb103afb54fc4934b124f7e4a47926b3f3d9e8d5151da9b31f350e" exitCode=0
Jan 21 17:51:34 crc kubenswrapper[4799]: I0121 17:51:34.600623 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-t49pc" event={"ID":"74829fa6-f20b-437c-9a76-e336eeb52598","Type":"ContainerDied","Data":"23baeb6ec3fb103afb54fc4934b124f7e4a47926b3f3d9e8d5151da9b31f350e"}
Jan 21 17:51:34 crc kubenswrapper[4799]: I0121 17:51:34.614526 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb","Type":"ContainerStarted","Data":"fa486aa1317a4359b8979a53f7d7df537d219d03994fe2085b132a44ebc30d8f"}
Jan 21 17:51:34 crc kubenswrapper[4799]: I0121 17:51:34.614574 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb","Type":"ContainerStarted","Data":"6a65adc16182214bd6ea23d9c0f4788f3d3c92e7d8b2f14e3f9aeb765615d6d8"}
Jan 21 17:51:34 crc kubenswrapper[4799]: I0121 17:51:34.642112 4799 generic.go:334] "Generic (PLEG): container finished" podID="900a87e2-8f11-4a39-8b54-59283d6fc6c2" containerID="57223c5a296c05233437e22c0ec2240b21d77c771f5ab4a77c16b07e24f675be" exitCode=0
Jan 21 17:51:34 crc kubenswrapper[4799]: I0121 17:51:34.642182 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-h9cvz" event={"ID":"900a87e2-8f11-4a39-8b54-59283d6fc6c2","Type":"ContainerDied","Data":"57223c5a296c05233437e22c0ec2240b21d77c771f5ab4a77c16b07e24f675be"}
Jan 21 17:51:34 crc kubenswrapper[4799]: I0121 17:51:34.720432 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=16.720393994 podStartE2EDuration="16.720393994s" podCreationTimestamp="2026-01-21 17:51:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:51:34.718923432 +0000 UTC m=+1121.345213445" watchObservedRunningTime="2026-01-21 17:51:34.720393994 +0000 UTC m=+1121.346684027"
Jan 21 17:51:38 crc kubenswrapper[4799]: I0121 17:51:38.436609 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-h9cvz"
Jan 21 17:51:38 crc kubenswrapper[4799]: I0121 17:51:38.524202 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqmh4\" (UniqueName: \"kubernetes.io/projected/900a87e2-8f11-4a39-8b54-59283d6fc6c2-kube-api-access-xqmh4\") pod \"900a87e2-8f11-4a39-8b54-59283d6fc6c2\" (UID: \"900a87e2-8f11-4a39-8b54-59283d6fc6c2\") "
Jan 21 17:51:38 crc kubenswrapper[4799]: I0121 17:51:38.524282 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/900a87e2-8f11-4a39-8b54-59283d6fc6c2-operator-scripts\") pod \"900a87e2-8f11-4a39-8b54-59283d6fc6c2\" (UID: \"900a87e2-8f11-4a39-8b54-59283d6fc6c2\") "
Jan 21 17:51:38 crc kubenswrapper[4799]: I0121 17:51:38.524809 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/900a87e2-8f11-4a39-8b54-59283d6fc6c2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "900a87e2-8f11-4a39-8b54-59283d6fc6c2" (UID: "900a87e2-8f11-4a39-8b54-59283d6fc6c2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:51:38 crc kubenswrapper[4799]: I0121 17:51:38.530479 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/900a87e2-8f11-4a39-8b54-59283d6fc6c2-kube-api-access-xqmh4" (OuterVolumeSpecName: "kube-api-access-xqmh4") pod "900a87e2-8f11-4a39-8b54-59283d6fc6c2" (UID: "900a87e2-8f11-4a39-8b54-59283d6fc6c2"). InnerVolumeSpecName "kube-api-access-xqmh4". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:51:38 crc kubenswrapper[4799]: I0121 17:51:38.627719 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqmh4\" (UniqueName: \"kubernetes.io/projected/900a87e2-8f11-4a39-8b54-59283d6fc6c2-kube-api-access-xqmh4\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:38 crc kubenswrapper[4799]: I0121 17:51:38.627766 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/900a87e2-8f11-4a39-8b54-59283d6fc6c2-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:38 crc kubenswrapper[4799]: I0121 17:51:38.686097 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-h9cvz" event={"ID":"900a87e2-8f11-4a39-8b54-59283d6fc6c2","Type":"ContainerDied","Data":"387a47d717b83b1b72c99ff27577df30a2b981cdf5f0de92e0dadbce3ad074fc"} Jan 21 17:51:38 crc kubenswrapper[4799]: I0121 17:51:38.686179 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="387a47d717b83b1b72c99ff27577df30a2b981cdf5f0de92e0dadbce3ad074fc" Jan 21 17:51:38 crc kubenswrapper[4799]: I0121 17:51:38.686264 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-h9cvz" Jan 21 17:51:38 crc kubenswrapper[4799]: I0121 17:51:38.749989 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:42 crc kubenswrapper[4799]: I0121 17:51:42.201291 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:51:42 crc kubenswrapper[4799]: I0121 17:51:42.211105 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/771ea47a-76eb-434d-ac1f-cf6048f08237-etc-swift\") pod \"swift-storage-0\" (UID: \"771ea47a-76eb-434d-ac1f-cf6048f08237\") " pod="openstack/swift-storage-0" Jan 21 17:51:42 crc kubenswrapper[4799]: I0121 17:51:42.227575 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Jan 21 17:51:43 crc kubenswrapper[4799]: I0121 17:51:43.198599 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-c43d-account-create-update-c7d49" Jan 21 17:51:43 crc kubenswrapper[4799]: I0121 17:51:43.238263 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xk82d\" (UniqueName: \"kubernetes.io/projected/237ef136-a48e-462a-b261-c7f2e386a15e-kube-api-access-xk82d\") pod \"237ef136-a48e-462a-b261-c7f2e386a15e\" (UID: \"237ef136-a48e-462a-b261-c7f2e386a15e\") " Jan 21 17:51:43 crc kubenswrapper[4799]: I0121 17:51:43.238580 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/237ef136-a48e-462a-b261-c7f2e386a15e-operator-scripts\") pod \"237ef136-a48e-462a-b261-c7f2e386a15e\" (UID: \"237ef136-a48e-462a-b261-c7f2e386a15e\") " Jan 21 17:51:43 crc kubenswrapper[4799]: I0121 17:51:43.240701 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/237ef136-a48e-462a-b261-c7f2e386a15e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "237ef136-a48e-462a-b261-c7f2e386a15e" (UID: "237ef136-a48e-462a-b261-c7f2e386a15e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:51:43 crc kubenswrapper[4799]: I0121 17:51:43.246266 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/237ef136-a48e-462a-b261-c7f2e386a15e-kube-api-access-xk82d" (OuterVolumeSpecName: "kube-api-access-xk82d") pod "237ef136-a48e-462a-b261-c7f2e386a15e" (UID: "237ef136-a48e-462a-b261-c7f2e386a15e"). InnerVolumeSpecName "kube-api-access-xk82d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:51:43 crc kubenswrapper[4799]: I0121 17:51:43.342077 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/237ef136-a48e-462a-b261-c7f2e386a15e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:43 crc kubenswrapper[4799]: I0121 17:51:43.342113 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xk82d\" (UniqueName: \"kubernetes.io/projected/237ef136-a48e-462a-b261-c7f2e386a15e-kube-api-access-xk82d\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:43 crc kubenswrapper[4799]: I0121 17:51:43.732399 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c43d-account-create-update-c7d49" event={"ID":"237ef136-a48e-462a-b261-c7f2e386a15e","Type":"ContainerDied","Data":"7d94492e42ecff0543a77bb6201f97e34531b89858ebf6716687dbbc43367356"} Jan 21 17:51:43 crc kubenswrapper[4799]: I0121 17:51:43.732450 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d94492e42ecff0543a77bb6201f97e34531b89858ebf6716687dbbc43367356" Jan 21 17:51:43 crc kubenswrapper[4799]: I0121 17:51:43.732535 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-c43d-account-create-update-c7d49" Jan 21 17:51:48 crc kubenswrapper[4799]: I0121 17:51:48.750004 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:48 crc kubenswrapper[4799]: I0121 17:51:48.757496 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:48 crc kubenswrapper[4799]: I0121 17:51:48.801563 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Jan 21 17:51:53 crc kubenswrapper[4799]: E0121 17:51:53.390525 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-glance-api:watcher_latest" Jan 21 17:51:53 crc kubenswrapper[4799]: E0121 17:51:53.391107 4799 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-glance-api:watcher_latest" Jan 21 17:51:53 crc kubenswrapper[4799]: E0121 17:51:53.391313 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:38.102.83.30:5001/podified-master-centos10/openstack-glance-api:watcher_latest,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6gbpn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-74m2t_openstack(482b08ae-060f-465a-9085-20d742c22a13): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 17:51:53 crc kubenswrapper[4799]: E0121 
Jan 21 17:51:53 crc kubenswrapper[4799]: E0121 17:51:53.392479 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-74m2t" podUID="482b08ae-060f-465a-9085-20d742c22a13"
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.441688 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-40ee-account-create-update-76684"
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.447710 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-hzwxh"
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.453081 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-t49pc"
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.469598 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8290-account-create-update-tqxrl"
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.558016 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2hfj\" (UniqueName: \"kubernetes.io/projected/74829fa6-f20b-437c-9a76-e336eeb52598-kube-api-access-v2hfj\") pod \"74829fa6-f20b-437c-9a76-e336eeb52598\" (UID: \"74829fa6-f20b-437c-9a76-e336eeb52598\") "
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.558611 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74829fa6-f20b-437c-9a76-e336eeb52598-operator-scripts\") pod \"74829fa6-f20b-437c-9a76-e336eeb52598\" (UID: \"74829fa6-f20b-437c-9a76-e336eeb52598\") "
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.558698 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95c5771e-7cc6-4529-a3fb-f8568b69a74a-operator-scripts\") pod \"95c5771e-7cc6-4529-a3fb-f8568b69a74a\" (UID: \"95c5771e-7cc6-4529-a3fb-f8568b69a74a\") "
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.558727 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b91e7f5a-8ad9-4965-8839-dc43dcfbaada-operator-scripts\") pod \"b91e7f5a-8ad9-4965-8839-dc43dcfbaada\" (UID: \"b91e7f5a-8ad9-4965-8839-dc43dcfbaada\") "
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.558773 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnv24\" (UniqueName: \"kubernetes.io/projected/85b36cad-2226-43fc-840c-1b44fa673bcb-kube-api-access-qnv24\") pod \"85b36cad-2226-43fc-840c-1b44fa673bcb\" (UID: \"85b36cad-2226-43fc-840c-1b44fa673bcb\") "
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.558820 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8nz79\" (UniqueName: \"kubernetes.io/projected/b91e7f5a-8ad9-4965-8839-dc43dcfbaada-kube-api-access-8nz79\") pod \"b91e7f5a-8ad9-4965-8839-dc43dcfbaada\" (UID: \"b91e7f5a-8ad9-4965-8839-dc43dcfbaada\") "
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.558850 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqnf8\" (UniqueName: \"kubernetes.io/projected/95c5771e-7cc6-4529-a3fb-f8568b69a74a-kube-api-access-xqnf8\") pod \"95c5771e-7cc6-4529-a3fb-f8568b69a74a\" (UID: \"95c5771e-7cc6-4529-a3fb-f8568b69a74a\") "
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.560294 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74829fa6-f20b-437c-9a76-e336eeb52598-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "74829fa6-f20b-437c-9a76-e336eeb52598" (UID: "74829fa6-f20b-437c-9a76-e336eeb52598"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.560310 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b91e7f5a-8ad9-4965-8839-dc43dcfbaada-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b91e7f5a-8ad9-4965-8839-dc43dcfbaada" (UID: "b91e7f5a-8ad9-4965-8839-dc43dcfbaada"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.560881 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95c5771e-7cc6-4529-a3fb-f8568b69a74a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "95c5771e-7cc6-4529-a3fb-f8568b69a74a" (UID: "95c5771e-7cc6-4529-a3fb-f8568b69a74a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.561995 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74829fa6-f20b-437c-9a76-e336eeb52598-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.562028 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95c5771e-7cc6-4529-a3fb-f8568b69a74a-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.562040 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b91e7f5a-8ad9-4965-8839-dc43dcfbaada-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.564700 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b91e7f5a-8ad9-4965-8839-dc43dcfbaada-kube-api-access-8nz79" (OuterVolumeSpecName: "kube-api-access-8nz79") pod "b91e7f5a-8ad9-4965-8839-dc43dcfbaada" (UID: "b91e7f5a-8ad9-4965-8839-dc43dcfbaada"). InnerVolumeSpecName "kube-api-access-8nz79". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.564859 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74829fa6-f20b-437c-9a76-e336eeb52598-kube-api-access-v2hfj" (OuterVolumeSpecName: "kube-api-access-v2hfj") pod "74829fa6-f20b-437c-9a76-e336eeb52598" (UID: "74829fa6-f20b-437c-9a76-e336eeb52598"). InnerVolumeSpecName "kube-api-access-v2hfj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.567195 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95c5771e-7cc6-4529-a3fb-f8568b69a74a-kube-api-access-xqnf8" (OuterVolumeSpecName: "kube-api-access-xqnf8") pod "95c5771e-7cc6-4529-a3fb-f8568b69a74a" (UID: "95c5771e-7cc6-4529-a3fb-f8568b69a74a"). InnerVolumeSpecName "kube-api-access-xqnf8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.574301 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85b36cad-2226-43fc-840c-1b44fa673bcb-kube-api-access-qnv24" (OuterVolumeSpecName: "kube-api-access-qnv24") pod "85b36cad-2226-43fc-840c-1b44fa673bcb" (UID: "85b36cad-2226-43fc-840c-1b44fa673bcb"). InnerVolumeSpecName "kube-api-access-qnv24". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.663261 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85b36cad-2226-43fc-840c-1b44fa673bcb-operator-scripts\") pod \"85b36cad-2226-43fc-840c-1b44fa673bcb\" (UID: \"85b36cad-2226-43fc-840c-1b44fa673bcb\") "
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.663761 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnv24\" (UniqueName: \"kubernetes.io/projected/85b36cad-2226-43fc-840c-1b44fa673bcb-kube-api-access-qnv24\") on node \"crc\" DevicePath \"\""
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.663784 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8nz79\" (UniqueName: \"kubernetes.io/projected/b91e7f5a-8ad9-4965-8839-dc43dcfbaada-kube-api-access-8nz79\") on node \"crc\" DevicePath \"\""
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.663761 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85b36cad-2226-43fc-840c-1b44fa673bcb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "85b36cad-2226-43fc-840c-1b44fa673bcb" (UID: "85b36cad-2226-43fc-840c-1b44fa673bcb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.663798 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqnf8\" (UniqueName: \"kubernetes.io/projected/95c5771e-7cc6-4529-a3fb-f8568b69a74a-kube-api-access-xqnf8\") on node \"crc\" DevicePath \"\""
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.663929 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2hfj\" (UniqueName: \"kubernetes.io/projected/74829fa6-f20b-437c-9a76-e336eeb52598-kube-api-access-v2hfj\") on node \"crc\" DevicePath \"\""
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.766283 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85b36cad-2226-43fc-840c-1b44fa673bcb-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.847257 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-hzwxh"
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.847276 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-hzwxh" event={"ID":"95c5771e-7cc6-4529-a3fb-f8568b69a74a","Type":"ContainerDied","Data":"6b520e8329732c48dab0de2d1dfd8996101a501fdac946a58df965ed0b27f153"}
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.847361 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b520e8329732c48dab0de2d1dfd8996101a501fdac946a58df965ed0b27f153"
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.849450 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-t49pc" event={"ID":"74829fa6-f20b-437c-9a76-e336eeb52598","Type":"ContainerDied","Data":"b0e6739f9b0129a8fe2490bc49427856efe8a6cf8ae2eeacc536e89c5848c592"}
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.849497 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-t49pc"
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.849543 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0e6739f9b0129a8fe2490bc49427856efe8a6cf8ae2eeacc536e89c5848c592"
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.851508 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-40ee-account-create-update-76684" event={"ID":"b91e7f5a-8ad9-4965-8839-dc43dcfbaada","Type":"ContainerDied","Data":"e9faebb107b76a34b37da24cc9457fb829d8b0a92e5c36cc04101a413378120b"}
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.851529 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-40ee-account-create-update-76684"
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.851538 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9faebb107b76a34b37da24cc9457fb829d8b0a92e5c36cc04101a413378120b"
Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.854109 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8290-account-create-update-tqxrl"
Need to start a new one" pod="openstack/neutron-8290-account-create-update-tqxrl" Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.856307 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8290-account-create-update-tqxrl" event={"ID":"85b36cad-2226-43fc-840c-1b44fa673bcb","Type":"ContainerDied","Data":"7fff1694ff96ce4b40971c241e63dd1787bcaed5ba1abeef99074114333e199f"} Jan 21 17:51:53 crc kubenswrapper[4799]: E0121 17:51:53.856380 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.30:5001/podified-master-centos10/openstack-glance-api:watcher_latest\\\"\"" pod="openstack/glance-db-sync-74m2t" podUID="482b08ae-060f-465a-9085-20d742c22a13" Jan 21 17:51:53 crc kubenswrapper[4799]: I0121 17:51:53.856435 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7fff1694ff96ce4b40971c241e63dd1787bcaed5ba1abeef99074114333e199f" Jan 21 17:51:53 crc kubenswrapper[4799]: E0121 17:51:53.898913 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-watcher-api:watcher_latest" Jan 21 17:51:53 crc kubenswrapper[4799]: E0121 17:51:53.898964 4799 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-watcher-api:watcher_latest" Jan 21 17:51:53 crc kubenswrapper[4799]: E0121 17:51:53.899111 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:watcher-db-sync,Image:38.102.83.30:5001/podified-master-centos10/openstack-watcher-api:watcher_latest,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/watcher/watcher.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:watcher-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9pr8b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-db-sync-6zwqt_openstack(27ad5c23-b3d3-41a1-a4ae-4821eb3524fd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 17:51:53 crc kubenswrapper[4799]: E0121 17:51:53.900388 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/watcher-db-sync-6zwqt" podUID="27ad5c23-b3d3-41a1-a4ae-4821eb3524fd" Jan 21 17:51:54 crc kubenswrapper[4799]: I0121 17:51:54.442175 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Jan 21 17:51:54 crc kubenswrapper[4799]: W0121 17:51:54.442741 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod771ea47a_76eb_434d_ac1f_cf6048f08237.slice/crio-e4a5e2748cbb6e1856eccb89d6c6cae5f753f5efcda818e3aa236ab9c006ba6a WatchSource:0}: Error finding container e4a5e2748cbb6e1856eccb89d6c6cae5f753f5efcda818e3aa236ab9c006ba6a: Status 404 returned error can't find the container with id e4a5e2748cbb6e1856eccb89d6c6cae5f753f5efcda818e3aa236ab9c006ba6a Jan 21 17:51:54 crc kubenswrapper[4799]: I0121 17:51:54.864299 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vlrpv" event={"ID":"112b4dcd-ad4f-40da-9ec8-27bf53f989a8","Type":"ContainerStarted","Data":"7277835ea22dd4eccf648b43927e7b41037756a06015668489d45fd127056e93"} Jan 21 17:51:54 crc kubenswrapper[4799]: I0121 
17:51:54.868677 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"771ea47a-76eb-434d-ac1f-cf6048f08237","Type":"ContainerStarted","Data":"e4a5e2748cbb6e1856eccb89d6c6cae5f753f5efcda818e3aa236ab9c006ba6a"} Jan 21 17:51:54 crc kubenswrapper[4799]: E0121 17:51:54.876182 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.30:5001/podified-master-centos10/openstack-watcher-api:watcher_latest\\\"\"" pod="openstack/watcher-db-sync-6zwqt" podUID="27ad5c23-b3d3-41a1-a4ae-4821eb3524fd" Jan 21 17:51:54 crc kubenswrapper[4799]: I0121 17:51:54.893946 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-vlrpv" podStartSLOduration=3.207757202 podStartE2EDuration="23.893920758s" podCreationTimestamp="2026-01-21 17:51:31 +0000 UTC" firstStartedPulling="2026-01-21 17:51:33.206272528 +0000 UTC m=+1119.832562551" lastFinishedPulling="2026-01-21 17:51:53.892436084 +0000 UTC m=+1140.518726107" observedRunningTime="2026-01-21 17:51:54.88579854 +0000 UTC m=+1141.512088573" watchObservedRunningTime="2026-01-21 17:51:54.893920758 +0000 UTC m=+1141.520210781" Jan 21 17:51:55 crc kubenswrapper[4799]: I0121 17:51:55.883164 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"771ea47a-76eb-434d-ac1f-cf6048f08237","Type":"ContainerStarted","Data":"69771565610a0bc810bd9df60f178469f570ff06075dfa7b223a4e1a9ea90fb3"} Jan 21 17:51:55 crc kubenswrapper[4799]: I0121 17:51:55.884076 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"771ea47a-76eb-434d-ac1f-cf6048f08237","Type":"ContainerStarted","Data":"2ce2a3c6dfddf7b7ab262a09cda4e4aa7e05e912e568c99810bf4cd84338fc77"} Jan 21 17:51:55 crc kubenswrapper[4799]: I0121 17:51:55.884291 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"771ea47a-76eb-434d-ac1f-cf6048f08237","Type":"ContainerStarted","Data":"17eae58b9514637c2dd824838504e2af2ffefd279b0f58ff5f12f09960db315b"} Jan 21 17:51:55 crc kubenswrapper[4799]: I0121 17:51:55.884301 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"771ea47a-76eb-434d-ac1f-cf6048f08237","Type":"ContainerStarted","Data":"1f207e43b534d2286c437dca72e69b1e35ba946e0bb0a8f40379a2e23fb11642"} Jan 21 17:51:56 crc kubenswrapper[4799]: I0121 17:51:56.902293 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"771ea47a-76eb-434d-ac1f-cf6048f08237","Type":"ContainerStarted","Data":"49fcf93aa66fcb7e90187ca043106e2db247c2605dc70094e69618a300a84492"} Jan 21 17:51:56 crc kubenswrapper[4799]: I0121 17:51:56.902351 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"771ea47a-76eb-434d-ac1f-cf6048f08237","Type":"ContainerStarted","Data":"82219395d94789aa59eec4bacd28cbae13840cda647778b9996d8ccc35a08f93"} Jan 21 17:51:56 crc kubenswrapper[4799]: I0121 17:51:56.902363 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"771ea47a-76eb-434d-ac1f-cf6048f08237","Type":"ContainerStarted","Data":"53014e7157cf6728318210ba795deb219feed1dcd0ce4057b0f0ce12a6a8f13c"} Jan 21 17:51:56 crc kubenswrapper[4799]: I0121 17:51:56.902371 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"771ea47a-76eb-434d-ac1f-cf6048f08237","Type":"ContainerStarted","Data":"45a94851e1b185cf8f25f4aca3f4c1e4c7d3ca9878a83c18fe6e785e9d7e24a0"} Jan 21 17:51:57 crc kubenswrapper[4799]: I0121 17:51:57.911064 4799 generic.go:334] "Generic (PLEG): container finished" podID="112b4dcd-ad4f-40da-9ec8-27bf53f989a8" containerID="7277835ea22dd4eccf648b43927e7b41037756a06015668489d45fd127056e93" exitCode=0 Jan 21 17:51:57 crc kubenswrapper[4799]: I0121 17:51:57.911480 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vlrpv" event={"ID":"112b4dcd-ad4f-40da-9ec8-27bf53f989a8","Type":"ContainerDied","Data":"7277835ea22dd4eccf648b43927e7b41037756a06015668489d45fd127056e93"} Jan 21 17:51:57 crc kubenswrapper[4799]: I0121 17:51:57.918481 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"771ea47a-76eb-434d-ac1f-cf6048f08237","Type":"ContainerStarted","Data":"5c13142d965e1fdf2ec269a61cb9d2541e267dbff5631a2c0d7d807238640dcd"} Jan 21 17:51:57 crc kubenswrapper[4799]: I0121 17:51:57.918521 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"771ea47a-76eb-434d-ac1f-cf6048f08237","Type":"ContainerStarted","Data":"e7c6f838d62d0b078a120683a79ae2d3ab6202de89a0998301554855439f9956"} Jan 21 17:51:58 crc kubenswrapper[4799]: I0121 17:51:58.959429 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"771ea47a-76eb-434d-ac1f-cf6048f08237","Type":"ContainerStarted","Data":"b28e0e57faa86af6fb494af197509c43d05b72e35d151918a2e6f26336ece34b"} Jan 21 17:51:58 crc kubenswrapper[4799]: I0121 17:51:58.959488 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"771ea47a-76eb-434d-ac1f-cf6048f08237","Type":"ContainerStarted","Data":"4a782c46299e45f3aa23ce6230599e84a5a00ccf354b32f71e79f5c2afc3bed9"} Jan 21 17:51:58 crc kubenswrapper[4799]: I0121 17:51:58.959502 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"771ea47a-76eb-434d-ac1f-cf6048f08237","Type":"ContainerStarted","Data":"cff5266d80a3e31cc96a6de8e529a6a1aee532cb50a5ce645a83f36b49bc573c"} Jan 21 17:51:58 crc kubenswrapper[4799]: I0121 17:51:58.959516 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"771ea47a-76eb-434d-ac1f-cf6048f08237","Type":"ContainerStarted","Data":"c24df83ec4371e02c9a0c201e9b4ed9db9df486549353d731c760e6f01e91996"} Jan 21 17:51:58 crc kubenswrapper[4799]: I0121 17:51:58.959528 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"771ea47a-76eb-434d-ac1f-cf6048f08237","Type":"ContainerStarted","Data":"a623285e14b0465e13a9b8746f79ebcc8bbff2cc5ca05ba8c819b592918c87ad"} Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.004656 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=79.043887295 podStartE2EDuration="1m22.004635982s" podCreationTimestamp="2026-01-21 17:50:37 +0000 UTC" firstStartedPulling="2026-01-21 17:51:54.445305142 +0000 UTC m=+1141.071595165" lastFinishedPulling="2026-01-21 17:51:57.406053829 +0000 UTC m=+1144.032343852" observedRunningTime="2026-01-21 17:51:59.001982037 +0000 UTC m=+1145.628272140" watchObservedRunningTime="2026-01-21 17:51:59.004635982 +0000 UTC m=+1145.630926005" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.293562 4799 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/keystone-db-sync-vlrpv" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.316381 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68788854ff-l6zkp"] Jan 21 17:51:59 crc kubenswrapper[4799]: E0121 17:51:59.316870 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="112b4dcd-ad4f-40da-9ec8-27bf53f989a8" containerName="keystone-db-sync" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.316887 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="112b4dcd-ad4f-40da-9ec8-27bf53f989a8" containerName="keystone-db-sync" Jan 21 17:51:59 crc kubenswrapper[4799]: E0121 17:51:59.316900 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="237ef136-a48e-462a-b261-c7f2e386a15e" containerName="mariadb-account-create-update" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.316907 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="237ef136-a48e-462a-b261-c7f2e386a15e" containerName="mariadb-account-create-update" Jan 21 17:51:59 crc kubenswrapper[4799]: E0121 17:51:59.316921 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74829fa6-f20b-437c-9a76-e336eeb52598" containerName="mariadb-database-create" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.316929 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="74829fa6-f20b-437c-9a76-e336eeb52598" containerName="mariadb-database-create" Jan 21 17:51:59 crc kubenswrapper[4799]: E0121 17:51:59.316942 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95c5771e-7cc6-4529-a3fb-f8568b69a74a" containerName="mariadb-database-create" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.316948 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="95c5771e-7cc6-4529-a3fb-f8568b69a74a" containerName="mariadb-database-create" Jan 21 17:51:59 crc kubenswrapper[4799]: E0121 17:51:59.316957 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="900a87e2-8f11-4a39-8b54-59283d6fc6c2" containerName="mariadb-database-create" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.316963 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="900a87e2-8f11-4a39-8b54-59283d6fc6c2" containerName="mariadb-database-create" Jan 21 17:51:59 crc kubenswrapper[4799]: E0121 17:51:59.316976 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b91e7f5a-8ad9-4965-8839-dc43dcfbaada" containerName="mariadb-account-create-update" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.316981 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b91e7f5a-8ad9-4965-8839-dc43dcfbaada" containerName="mariadb-account-create-update" Jan 21 17:51:59 crc kubenswrapper[4799]: E0121 17:51:59.316995 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85b36cad-2226-43fc-840c-1b44fa673bcb" containerName="mariadb-account-create-update" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.317001 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="85b36cad-2226-43fc-840c-1b44fa673bcb" containerName="mariadb-account-create-update" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.317219 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="74829fa6-f20b-437c-9a76-e336eeb52598" containerName="mariadb-database-create" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.317240 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="900a87e2-8f11-4a39-8b54-59283d6fc6c2" 
containerName="mariadb-database-create" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.317259 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="95c5771e-7cc6-4529-a3fb-f8568b69a74a" containerName="mariadb-database-create" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.317266 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b91e7f5a-8ad9-4965-8839-dc43dcfbaada" containerName="mariadb-account-create-update" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.317287 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="112b4dcd-ad4f-40da-9ec8-27bf53f989a8" containerName="keystone-db-sync" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.317303 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="85b36cad-2226-43fc-840c-1b44fa673bcb" containerName="mariadb-account-create-update" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.317316 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="237ef136-a48e-462a-b261-c7f2e386a15e" containerName="mariadb-account-create-update" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.318330 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.321331 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.333957 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68788854ff-l6zkp"] Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.481532 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-config-data\") pod \"112b4dcd-ad4f-40da-9ec8-27bf53f989a8\" (UID: \"112b4dcd-ad4f-40da-9ec8-27bf53f989a8\") " Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.481637 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xbbb\" (UniqueName: \"kubernetes.io/projected/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-kube-api-access-4xbbb\") pod \"112b4dcd-ad4f-40da-9ec8-27bf53f989a8\" (UID: \"112b4dcd-ad4f-40da-9ec8-27bf53f989a8\") " Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.481680 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-combined-ca-bundle\") pod \"112b4dcd-ad4f-40da-9ec8-27bf53f989a8\" (UID: \"112b4dcd-ad4f-40da-9ec8-27bf53f989a8\") " Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.482116 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgp9l\" (UniqueName: \"kubernetes.io/projected/485eaca1-f3f9-42ad-97f7-3b948cef32a0-kube-api-access-pgp9l\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.482199 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-dns-swift-storage-0\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc 
kubenswrapper[4799]: I0121 17:51:59.482372 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-config\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.482489 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-dns-svc\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.482558 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-ovsdbserver-nb\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.482634 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-ovsdbserver-sb\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.513462 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-kube-api-access-4xbbb" (OuterVolumeSpecName: "kube-api-access-4xbbb") pod "112b4dcd-ad4f-40da-9ec8-27bf53f989a8" (UID: "112b4dcd-ad4f-40da-9ec8-27bf53f989a8"). InnerVolumeSpecName "kube-api-access-4xbbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.515257 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "112b4dcd-ad4f-40da-9ec8-27bf53f989a8" (UID: "112b4dcd-ad4f-40da-9ec8-27bf53f989a8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.552287 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-config-data" (OuterVolumeSpecName: "config-data") pod "112b4dcd-ad4f-40da-9ec8-27bf53f989a8" (UID: "112b4dcd-ad4f-40da-9ec8-27bf53f989a8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.584572 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-config\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.584903 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-dns-svc\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.584998 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-ovsdbserver-nb\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.585083 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-ovsdbserver-sb\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.585194 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgp9l\" (UniqueName: \"kubernetes.io/projected/485eaca1-f3f9-42ad-97f7-3b948cef32a0-kube-api-access-pgp9l\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.585294 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-dns-swift-storage-0\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.585410 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.585468 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xbbb\" (UniqueName: \"kubernetes.io/projected/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-kube-api-access-4xbbb\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.585523 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112b4dcd-ad4f-40da-9ec8-27bf53f989a8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.585596 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-config\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " 
pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.586168 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-ovsdbserver-nb\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.586596 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-dns-swift-storage-0\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.587054 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-dns-svc\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.590772 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-ovsdbserver-sb\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.613599 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgp9l\" (UniqueName: \"kubernetes.io/projected/485eaca1-f3f9-42ad-97f7-3b948cef32a0-kube-api-access-pgp9l\") pod \"dnsmasq-dns-68788854ff-l6zkp\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.642468 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.970802 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vlrpv" event={"ID":"112b4dcd-ad4f-40da-9ec8-27bf53f989a8","Type":"ContainerDied","Data":"b75f521e0fbbfb0b7faddb7c26fec6212fe7b3f9a32e30a8ac34d767185b39c1"} Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.970831 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-vlrpv" Jan 21 17:51:59 crc kubenswrapper[4799]: I0121 17:51:59.970858 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b75f521e0fbbfb0b7faddb7c26fec6212fe7b3f9a32e30a8ac34d767185b39c1" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.081854 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68788854ff-l6zkp"] Jan 21 17:52:00 crc kubenswrapper[4799]: W0121 17:52:00.087050 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod485eaca1_f3f9_42ad_97f7_3b948cef32a0.slice/crio-27d52aed7693d8909195139b327c021c49cd46dcd3807ba9814cada4f2e3973c WatchSource:0}: Error finding container 27d52aed7693d8909195139b327c021c49cd46dcd3807ba9814cada4f2e3973c: Status 404 returned error can't find the container with id 27d52aed7693d8909195139b327c021c49cd46dcd3807ba9814cada4f2e3973c Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.296800 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-42vhg"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.306007 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.311598 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.311876 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-phg79" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.311965 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.312381 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.318438 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.392268 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-42vhg"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.416218 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92j4q\" (UniqueName: \"kubernetes.io/projected/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-kube-api-access-92j4q\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.416277 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-fernet-keys\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.416315 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-credential-keys\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 
17:52:00.416351 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-config-data\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.416369 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-scripts\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.416395 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-combined-ca-bundle\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.443432 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68788854ff-l6zkp"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.502220 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59787b855c-kz7lq"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.504580 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.518662 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92j4q\" (UniqueName: \"kubernetes.io/projected/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-kube-api-access-92j4q\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.518697 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-fernet-keys\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.518717 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-credential-keys\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.518746 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-config-data\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.518764 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-scripts\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc 
kubenswrapper[4799]: I0121 17:52:00.518790 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-combined-ca-bundle\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.530927 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-s94pl"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.532136 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-fernet-keys\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.533648 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.537823 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-config-data\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.537912 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59787b855c-kz7lq"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.538300 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-scripts\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.544091 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-combined-ca-bundle\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.544600 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-credential-keys\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.548498 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.548765 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-zmv4p" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.549314 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.564777 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92j4q\" (UniqueName: \"kubernetes.io/projected/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-kube-api-access-92j4q\") pod \"keystone-bootstrap-42vhg\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " 
pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.571041 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-s94pl"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.583022 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-74d95c7fcf-hmbcl"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.591992 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.596267 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.596455 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-kdwcc"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.600983 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.601363 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.604866 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kdwcc" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.612889 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.613646 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-jgdss" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.613883 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.614023 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-8wxcv" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624284 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/45457092-3e80-4528-99f1-b1f5f1c2f128-etc-machine-id\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624340 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/534362cc-8f86-49f5-95af-2027f8f64b0a-scripts\") pod \"horizon-74d95c7fcf-hmbcl\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624383 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-db-sync-config-data\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624413 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-scripts\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " 
pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624439 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-ovsdbserver-sb\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624467 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-config-data\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624517 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/534362cc-8f86-49f5-95af-2027f8f64b0a-config-data\") pod \"horizon-74d95c7fcf-hmbcl\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624542 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9rg9\" (UniqueName: \"kubernetes.io/projected/534362cc-8f86-49f5-95af-2027f8f64b0a-kube-api-access-k9rg9\") pod \"horizon-74d95c7fcf-hmbcl\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624567 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bq4ld\" (UniqueName: \"kubernetes.io/projected/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-kube-api-access-bq4ld\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624603 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-ovsdbserver-nb\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624624 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/534362cc-8f86-49f5-95af-2027f8f64b0a-logs\") pod \"horizon-74d95c7fcf-hmbcl\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624682 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-dns-svc\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624705 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-dns-swift-storage-0\") pod 
\"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624732 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/534362cc-8f86-49f5-95af-2027f8f64b0a-horizon-secret-key\") pod \"horizon-74d95c7fcf-hmbcl\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624762 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8gsc\" (UniqueName: \"kubernetes.io/projected/45457092-3e80-4528-99f1-b1f5f1c2f128-kube-api-access-f8gsc\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624790 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-config\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.624863 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-combined-ca-bundle\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.627182 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-74d95c7fcf-hmbcl"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.666107 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-kdwcc"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.689895 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.731440 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fba342a8-536f-4c59-bb2c-44984e0a7fe0-config\") pod \"neutron-db-sync-kdwcc\" (UID: \"fba342a8-536f-4c59-bb2c-44984e0a7fe0\") " pod="openstack/neutron-db-sync-kdwcc" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.731522 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fba342a8-536f-4c59-bb2c-44984e0a7fe0-combined-ca-bundle\") pod \"neutron-db-sync-kdwcc\" (UID: \"fba342a8-536f-4c59-bb2c-44984e0a7fe0\") " pod="openstack/neutron-db-sync-kdwcc" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.731557 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/534362cc-8f86-49f5-95af-2027f8f64b0a-config-data\") pod \"horizon-74d95c7fcf-hmbcl\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.731579 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9rg9\" (UniqueName: \"kubernetes.io/projected/534362cc-8f86-49f5-95af-2027f8f64b0a-kube-api-access-k9rg9\") pod \"horizon-74d95c7fcf-hmbcl\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.731608 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bq4ld\" (UniqueName: \"kubernetes.io/projected/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-kube-api-access-bq4ld\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.731638 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-ovsdbserver-nb\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.731658 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/534362cc-8f86-49f5-95af-2027f8f64b0a-logs\") pod \"horizon-74d95c7fcf-hmbcl\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.731730 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-dns-svc\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.731755 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-dns-swift-storage-0\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 
crc kubenswrapper[4799]: I0121 17:52:00.731855 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/534362cc-8f86-49f5-95af-2027f8f64b0a-horizon-secret-key\") pod \"horizon-74d95c7fcf-hmbcl\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.731893 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8gsc\" (UniqueName: \"kubernetes.io/projected/45457092-3e80-4528-99f1-b1f5f1c2f128-kube-api-access-f8gsc\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.731919 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-config\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.731983 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-combined-ca-bundle\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.732036 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5v5j\" (UniqueName: \"kubernetes.io/projected/fba342a8-536f-4c59-bb2c-44984e0a7fe0-kube-api-access-k5v5j\") pod \"neutron-db-sync-kdwcc\" (UID: \"fba342a8-536f-4c59-bb2c-44984e0a7fe0\") " pod="openstack/neutron-db-sync-kdwcc" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.732286 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/45457092-3e80-4528-99f1-b1f5f1c2f128-etc-machine-id\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.732318 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/534362cc-8f86-49f5-95af-2027f8f64b0a-scripts\") pod \"horizon-74d95c7fcf-hmbcl\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.732363 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-db-sync-config-data\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.732410 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-scripts\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.732444 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" 
(UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-ovsdbserver-sb\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.732469 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-config-data\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.734743 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/45457092-3e80-4528-99f1-b1f5f1c2f128-etc-machine-id\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.735746 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-config\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.740836 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/534362cc-8f86-49f5-95af-2027f8f64b0a-config-data\") pod \"horizon-74d95c7fcf-hmbcl\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.741317 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/534362cc-8f86-49f5-95af-2027f8f64b0a-scripts\") pod \"horizon-74d95c7fcf-hmbcl\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.741519 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/534362cc-8f86-49f5-95af-2027f8f64b0a-logs\") pod \"horizon-74d95c7fcf-hmbcl\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.742142 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-ovsdbserver-nb\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.742668 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-dns-svc\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.743163 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-dns-swift-storage-0\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 
crc kubenswrapper[4799]: I0121 17:52:00.748594 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-ovsdbserver-sb\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.757481 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-config-data\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.760118 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/534362cc-8f86-49f5-95af-2027f8f64b0a-horizon-secret-key\") pod \"horizon-74d95c7fcf-hmbcl\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.760542 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-combined-ca-bundle\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.760576 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-h8tvt"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.762882 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-db-sync-config-data\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.765440 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9rg9\" (UniqueName: \"kubernetes.io/projected/534362cc-8f86-49f5-95af-2027f8f64b0a-kube-api-access-k9rg9\") pod \"horizon-74d95c7fcf-hmbcl\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.774975 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-scripts\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.780404 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8gsc\" (UniqueName: \"kubernetes.io/projected/45457092-3e80-4528-99f1-b1f5f1c2f128-kube-api-access-f8gsc\") pod \"cinder-db-sync-s94pl\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.792455 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-h8tvt" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.794421 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-h8tvt"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.796364 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-ggtzr" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.796655 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.796913 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.798865 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bq4ld\" (UniqueName: \"kubernetes.io/projected/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-kube-api-access-bq4ld\") pod \"dnsmasq-dns-59787b855c-kz7lq\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.837248 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fba342a8-536f-4c59-bb2c-44984e0a7fe0-config\") pod \"neutron-db-sync-kdwcc\" (UID: \"fba342a8-536f-4c59-bb2c-44984e0a7fe0\") " pod="openstack/neutron-db-sync-kdwcc" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.837375 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fba342a8-536f-4c59-bb2c-44984e0a7fe0-combined-ca-bundle\") pod \"neutron-db-sync-kdwcc\" (UID: \"fba342a8-536f-4c59-bb2c-44984e0a7fe0\") " pod="openstack/neutron-db-sync-kdwcc" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.837725 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-scripts\") pod \"placement-db-sync-h8tvt\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " pod="openstack/placement-db-sync-h8tvt" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.837798 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e9e3984-f73c-4a6d-8d5d-107481439374-logs\") pod \"placement-db-sync-h8tvt\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " pod="openstack/placement-db-sync-h8tvt" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.837984 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-combined-ca-bundle\") pod \"placement-db-sync-h8tvt\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " pod="openstack/placement-db-sync-h8tvt" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.838069 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-config-data\") pod \"placement-db-sync-h8tvt\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " pod="openstack/placement-db-sync-h8tvt" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.838160 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqmch\" (UniqueName: \"kubernetes.io/projected/9e9e3984-f73c-4a6d-8d5d-107481439374-kube-api-access-vqmch\") pod \"placement-db-sync-h8tvt\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " pod="openstack/placement-db-sync-h8tvt" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.845931 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.847744 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.852041 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.854150 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5v5j\" (UniqueName: \"kubernetes.io/projected/fba342a8-536f-4c59-bb2c-44984e0a7fe0-kube-api-access-k5v5j\") pod \"neutron-db-sync-kdwcc\" (UID: \"fba342a8-536f-4c59-bb2c-44984e0a7fe0\") " pod="openstack/neutron-db-sync-kdwcc" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.855370 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.855619 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.857173 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-847fc5fb45-sxffr"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.859230 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-847fc5fb45-sxffr" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.867281 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-847fc5fb45-sxffr"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.868487 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/fba342a8-536f-4c59-bb2c-44984e0a7fe0-config\") pod \"neutron-db-sync-kdwcc\" (UID: \"fba342a8-536f-4c59-bb2c-44984e0a7fe0\") " pod="openstack/neutron-db-sync-kdwcc" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.875747 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.882963 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5v5j\" (UniqueName: \"kubernetes.io/projected/fba342a8-536f-4c59-bb2c-44984e0a7fe0-kube-api-access-k5v5j\") pod \"neutron-db-sync-kdwcc\" (UID: \"fba342a8-536f-4c59-bb2c-44984e0a7fe0\") " pod="openstack/neutron-db-sync-kdwcc" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.885375 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fba342a8-536f-4c59-bb2c-44984e0a7fe0-combined-ca-bundle\") pod \"neutron-db-sync-kdwcc\" (UID: \"fba342a8-536f-4c59-bb2c-44984e0a7fe0\") " pod="openstack/neutron-db-sync-kdwcc" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.927542 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59787b855c-kz7lq"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.930479 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.944565 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.957061 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnmxt\" (UniqueName: \"kubernetes.io/projected/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-kube-api-access-nnmxt\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.957711 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-scripts\") pod \"placement-db-sync-h8tvt\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " pod="openstack/placement-db-sync-h8tvt" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.957828 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e9e3984-f73c-4a6d-8d5d-107481439374-logs\") pod \"placement-db-sync-h8tvt\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " pod="openstack/placement-db-sync-h8tvt" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.957939 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-run-httpd\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.958042 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/99ff9621-8520-4d76-9db8-87bb562c6499-scripts\") pod \"horizon-847fc5fb45-sxffr\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " pod="openstack/horizon-847fc5fb45-sxffr" Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.958116 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-combined-ca-bundle\") pod \"placement-db-sync-h8tvt\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " pod="openstack/placement-db-sync-h8tvt" 
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.958218 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-config-data\") pod \"placement-db-sync-h8tvt\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " pod="openstack/placement-db-sync-h8tvt"
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.958288 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqmch\" (UniqueName: \"kubernetes.io/projected/9e9e3984-f73c-4a6d-8d5d-107481439374-kube-api-access-vqmch\") pod \"placement-db-sync-h8tvt\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " pod="openstack/placement-db-sync-h8tvt"
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.958381 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t28nv\" (UniqueName: \"kubernetes.io/projected/99ff9621-8520-4d76-9db8-87bb562c6499-kube-api-access-t28nv\") pod \"horizon-847fc5fb45-sxffr\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " pod="openstack/horizon-847fc5fb45-sxffr"
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.958533 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99ff9621-8520-4d76-9db8-87bb562c6499-logs\") pod \"horizon-847fc5fb45-sxffr\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " pod="openstack/horizon-847fc5fb45-sxffr"
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.958617 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0"
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.958687 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-config-data\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0"
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.958753 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/99ff9621-8520-4d76-9db8-87bb562c6499-config-data\") pod \"horizon-847fc5fb45-sxffr\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " pod="openstack/horizon-847fc5fb45-sxffr"
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.958834 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/99ff9621-8520-4d76-9db8-87bb562c6499-horizon-secret-key\") pod \"horizon-847fc5fb45-sxffr\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " pod="openstack/horizon-847fc5fb45-sxffr"
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.958919 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-scripts\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0"
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.958995 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0"
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.959086 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-log-httpd\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0"
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.960634 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e9e3984-f73c-4a6d-8d5d-107481439374-logs\") pod \"placement-db-sync-h8tvt\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " pod="openstack/placement-db-sync-h8tvt"
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.966212 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-scripts\") pod \"placement-db-sync-h8tvt\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " pod="openstack/placement-db-sync-h8tvt"
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.972509 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-config-data\") pod \"placement-db-sync-h8tvt\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " pod="openstack/placement-db-sync-h8tvt"
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.978952 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-combined-ca-bundle\") pod \"placement-db-sync-h8tvt\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " pod="openstack/placement-db-sync-h8tvt"
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.979793 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77f6cc8899-p9cjd"]
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.984870 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd"
Jan 21 17:52:00 crc kubenswrapper[4799]: I0121 17:52:00.988633 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqmch\" (UniqueName: \"kubernetes.io/projected/9e9e3984-f73c-4a6d-8d5d-107481439374-kube-api-access-vqmch\") pod \"placement-db-sync-h8tvt\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " pod="openstack/placement-db-sync-h8tvt"
Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.020518 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77f6cc8899-p9cjd"]
Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.022026 4799 generic.go:334] "Generic (PLEG): container finished" podID="485eaca1-f3f9-42ad-97f7-3b948cef32a0" containerID="9fb2f8b69983f05d0604d71f9089713e10a905d07d55e57eae1c02ca0bb0ee5f" exitCode=0
Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.023209 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68788854ff-l6zkp" event={"ID":"485eaca1-f3f9-42ad-97f7-3b948cef32a0","Type":"ContainerDied","Data":"9fb2f8b69983f05d0604d71f9089713e10a905d07d55e57eae1c02ca0bb0ee5f"}
Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.033264 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68788854ff-l6zkp" event={"ID":"485eaca1-f3f9-42ad-97f7-3b948cef32a0","Type":"ContainerStarted","Data":"27d52aed7693d8909195139b327c021c49cd46dcd3807ba9814cada4f2e3973c"}
Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061198 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-config\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd"
Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061246 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnmxt\" (UniqueName: \"kubernetes.io/projected/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-kube-api-access-nnmxt\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0"
Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061273 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-ovsdbserver-nb\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd"
Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061319 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-dns-swift-storage-0\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd"
Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061346 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-run-httpd\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0"
Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061378 4799 reconciler_common.go:218]
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/99ff9621-8520-4d76-9db8-87bb562c6499-scripts\") pod \"horizon-847fc5fb45-sxffr\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " pod="openstack/horizon-847fc5fb45-sxffr" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061425 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-ovsdbserver-sb\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061477 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t28nv\" (UniqueName: \"kubernetes.io/projected/99ff9621-8520-4d76-9db8-87bb562c6499-kube-api-access-t28nv\") pod \"horizon-847fc5fb45-sxffr\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " pod="openstack/horizon-847fc5fb45-sxffr" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061510 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99ff9621-8520-4d76-9db8-87bb562c6499-logs\") pod \"horizon-847fc5fb45-sxffr\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " pod="openstack/horizon-847fc5fb45-sxffr" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061532 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-dns-svc\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061552 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061573 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-config-data\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061594 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/99ff9621-8520-4d76-9db8-87bb562c6499-config-data\") pod \"horizon-847fc5fb45-sxffr\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " pod="openstack/horizon-847fc5fb45-sxffr" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061614 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/99ff9621-8520-4d76-9db8-87bb562c6499-horizon-secret-key\") pod \"horizon-847fc5fb45-sxffr\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " pod="openstack/horizon-847fc5fb45-sxffr" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061637 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nj992\" (UniqueName: 
\"kubernetes.io/projected/ec158420-9dbf-4413-bd52-5041a9cee032-kube-api-access-nj992\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061654 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-scripts\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061680 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061696 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-log-httpd\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.061929 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-vkhcg"] Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.062488 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-log-httpd\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.063739 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-run-httpd\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.064513 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/99ff9621-8520-4d76-9db8-87bb562c6499-scripts\") pod \"horizon-847fc5fb45-sxffr\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " pod="openstack/horizon-847fc5fb45-sxffr" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.066600 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-vkhcg" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.068564 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99ff9621-8520-4d76-9db8-87bb562c6499-logs\") pod \"horizon-847fc5fb45-sxffr\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " pod="openstack/horizon-847fc5fb45-sxffr" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.076910 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/99ff9621-8520-4d76-9db8-87bb562c6499-config-data\") pod \"horizon-847fc5fb45-sxffr\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " pod="openstack/horizon-847fc5fb45-sxffr" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.081421 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.088939 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-scripts\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.089836 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-nlppk" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.092058 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.107051 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-vkhcg"] Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.135767 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t28nv\" (UniqueName: \"kubernetes.io/projected/99ff9621-8520-4d76-9db8-87bb562c6499-kube-api-access-t28nv\") pod \"horizon-847fc5fb45-sxffr\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " pod="openstack/horizon-847fc5fb45-sxffr" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.148820 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-config-data\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.149450 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.160786 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/99ff9621-8520-4d76-9db8-87bb562c6499-horizon-secret-key\") pod \"horizon-847fc5fb45-sxffr\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " pod="openstack/horizon-847fc5fb45-sxffr" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.176452 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-kdwcc" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.182942 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-config\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.183218 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-ovsdbserver-nb\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.183319 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-dns-swift-storage-0\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.183518 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm6w7\" (UniqueName: \"kubernetes.io/projected/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-kube-api-access-qm6w7\") pod \"barbican-db-sync-vkhcg\" (UID: \"d7930da5-46c0-4cc3-a63a-316aff9f5b3a\") " pod="openstack/barbican-db-sync-vkhcg" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.183606 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-ovsdbserver-sb\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.183674 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-combined-ca-bundle\") pod \"barbican-db-sync-vkhcg\" (UID: \"d7930da5-46c0-4cc3-a63a-316aff9f5b3a\") " pod="openstack/barbican-db-sync-vkhcg" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.183787 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-db-sync-config-data\") pod \"barbican-db-sync-vkhcg\" (UID: \"d7930da5-46c0-4cc3-a63a-316aff9f5b3a\") " pod="openstack/barbican-db-sync-vkhcg" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.183924 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-dns-svc\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.184063 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nj992\" (UniqueName: \"kubernetes.io/projected/ec158420-9dbf-4413-bd52-5041a9cee032-kube-api-access-nj992\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: 
\"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.184103 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnmxt\" (UniqueName: \"kubernetes.io/projected/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-kube-api-access-nnmxt\") pod \"ceilometer-0\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " pod="openstack/ceilometer-0" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.184969 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-ovsdbserver-sb\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.185353 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-dns-svc\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.186663 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-config\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.186960 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-dns-swift-storage-0\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.188621 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-ovsdbserver-nb\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.212026 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nj992\" (UniqueName: \"kubernetes.io/projected/ec158420-9dbf-4413-bd52-5041a9cee032-kube-api-access-nj992\") pod \"dnsmasq-dns-77f6cc8899-p9cjd\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.283732 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-h8tvt" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.285737 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm6w7\" (UniqueName: \"kubernetes.io/projected/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-kube-api-access-qm6w7\") pod \"barbican-db-sync-vkhcg\" (UID: \"d7930da5-46c0-4cc3-a63a-316aff9f5b3a\") " pod="openstack/barbican-db-sync-vkhcg" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.285783 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-combined-ca-bundle\") pod \"barbican-db-sync-vkhcg\" (UID: \"d7930da5-46c0-4cc3-a63a-316aff9f5b3a\") " pod="openstack/barbican-db-sync-vkhcg" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.285815 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-db-sync-config-data\") pod \"barbican-db-sync-vkhcg\" (UID: \"d7930da5-46c0-4cc3-a63a-316aff9f5b3a\") " pod="openstack/barbican-db-sync-vkhcg" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.294546 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-combined-ca-bundle\") pod \"barbican-db-sync-vkhcg\" (UID: \"d7930da5-46c0-4cc3-a63a-316aff9f5b3a\") " pod="openstack/barbican-db-sync-vkhcg" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.295399 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-db-sync-config-data\") pod \"barbican-db-sync-vkhcg\" (UID: \"d7930da5-46c0-4cc3-a63a-316aff9f5b3a\") " pod="openstack/barbican-db-sync-vkhcg" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.309045 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.313518 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm6w7\" (UniqueName: \"kubernetes.io/projected/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-kube-api-access-qm6w7\") pod \"barbican-db-sync-vkhcg\" (UID: \"d7930da5-46c0-4cc3-a63a-316aff9f5b3a\") " pod="openstack/barbican-db-sync-vkhcg" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.328246 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-847fc5fb45-sxffr" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.371217 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.412095 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-42vhg"] Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.479794 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-vkhcg" Jan 21 17:52:01 crc kubenswrapper[4799]: W0121 17:52:01.653353 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6fdeb1d8_5803_47be_8625_68aeaf6a8ef2.slice/crio-bff3d2b416977d0b9f17726d2052e772d5ade951784a5a87840fbb1a77ba4ad3 WatchSource:0}: Error finding container bff3d2b416977d0b9f17726d2052e772d5ade951784a5a87840fbb1a77ba4ad3: Status 404 returned error can't find the container with id bff3d2b416977d0b9f17726d2052e772d5ade951784a5a87840fbb1a77ba4ad3 Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.684387 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59787b855c-kz7lq"] Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.705039 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-s94pl"] Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.783837 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.839777 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-kdwcc"] Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.851281 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-74d95c7fcf-hmbcl"] Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.900265 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-dns-swift-storage-0\") pod \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.900567 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-config\") pod \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.900633 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-ovsdbserver-sb\") pod \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.900666 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-ovsdbserver-nb\") pod \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.900734 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgp9l\" (UniqueName: \"kubernetes.io/projected/485eaca1-f3f9-42ad-97f7-3b948cef32a0-kube-api-access-pgp9l\") pod \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.900840 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-dns-svc\") pod \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\" (UID: \"485eaca1-f3f9-42ad-97f7-3b948cef32a0\") " Jan 
21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.917465 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/485eaca1-f3f9-42ad-97f7-3b948cef32a0-kube-api-access-pgp9l" (OuterVolumeSpecName: "kube-api-access-pgp9l") pod "485eaca1-f3f9-42ad-97f7-3b948cef32a0" (UID: "485eaca1-f3f9-42ad-97f7-3b948cef32a0"). InnerVolumeSpecName "kube-api-access-pgp9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:52:01 crc kubenswrapper[4799]: W0121 17:52:01.942711 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod45457092_3e80_4528_99f1_b1f5f1c2f128.slice/crio-cb663c37995d672b7288f14fb9bff80aea8cef394c7ac671b8f025dcb343d9b5 WatchSource:0}: Error finding container cb663c37995d672b7288f14fb9bff80aea8cef394c7ac671b8f025dcb343d9b5: Status 404 returned error can't find the container with id cb663c37995d672b7288f14fb9bff80aea8cef394c7ac671b8f025dcb343d9b5 Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.943787 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "485eaca1-f3f9-42ad-97f7-3b948cef32a0" (UID: "485eaca1-f3f9-42ad-97f7-3b948cef32a0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:01 crc kubenswrapper[4799]: W0121 17:52:01.944342 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e51eac7_15ac_4cd2_bc33_8ce20b2e8525.slice/crio-994a5f9b86a1d6dfb69efcf340d580e6c0805421c099b27e6bee297144334ca6 WatchSource:0}: Error finding container 994a5f9b86a1d6dfb69efcf340d580e6c0805421c099b27e6bee297144334ca6: Status 404 returned error can't find the container with id 994a5f9b86a1d6dfb69efcf340d580e6c0805421c099b27e6bee297144334ca6 Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.952999 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "485eaca1-f3f9-42ad-97f7-3b948cef32a0" (UID: "485eaca1-f3f9-42ad-97f7-3b948cef32a0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:01 crc kubenswrapper[4799]: W0121 17:52:01.957362 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod534362cc_8f86_49f5_95af_2027f8f64b0a.slice/crio-5e4674308dff9469c0d6f1ab11bc86f7d92cfe5e0f51e4dbe94011d3c17a138d WatchSource:0}: Error finding container 5e4674308dff9469c0d6f1ab11bc86f7d92cfe5e0f51e4dbe94011d3c17a138d: Status 404 returned error can't find the container with id 5e4674308dff9469c0d6f1ab11bc86f7d92cfe5e0f51e4dbe94011d3c17a138d Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.959365 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-config" (OuterVolumeSpecName: "config") pod "485eaca1-f3f9-42ad-97f7-3b948cef32a0" (UID: "485eaca1-f3f9-42ad-97f7-3b948cef32a0"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:01 crc kubenswrapper[4799]: I0121 17:52:01.964003 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "485eaca1-f3f9-42ad-97f7-3b948cef32a0" (UID: "485eaca1-f3f9-42ad-97f7-3b948cef32a0"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.002184 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "485eaca1-f3f9-42ad-97f7-3b948cef32a0" (UID: "485eaca1-f3f9-42ad-97f7-3b948cef32a0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.005380 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.005540 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.005601 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.005653 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgp9l\" (UniqueName: \"kubernetes.io/projected/485eaca1-f3f9-42ad-97f7-3b948cef32a0-kube-api-access-pgp9l\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.005704 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.005785 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/485eaca1-f3f9-42ad-97f7-3b948cef32a0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.057341 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-s94pl" event={"ID":"45457092-3e80-4528-99f1-b1f5f1c2f128","Type":"ContainerStarted","Data":"cb663c37995d672b7288f14fb9bff80aea8cef394c7ac671b8f025dcb343d9b5"} Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.062533 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-74d95c7fcf-hmbcl" event={"ID":"534362cc-8f86-49f5-95af-2027f8f64b0a","Type":"ContainerStarted","Data":"5e4674308dff9469c0d6f1ab11bc86f7d92cfe5e0f51e4dbe94011d3c17a138d"} Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.066064 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59787b855c-kz7lq" event={"ID":"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525","Type":"ContainerStarted","Data":"994a5f9b86a1d6dfb69efcf340d580e6c0805421c099b27e6bee297144334ca6"} Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.071331 
4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-42vhg" event={"ID":"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2","Type":"ContainerStarted","Data":"bff3d2b416977d0b9f17726d2052e772d5ade951784a5a87840fbb1a77ba4ad3"} Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.073404 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68788854ff-l6zkp" event={"ID":"485eaca1-f3f9-42ad-97f7-3b948cef32a0","Type":"ContainerDied","Data":"27d52aed7693d8909195139b327c021c49cd46dcd3807ba9814cada4f2e3973c"} Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.073479 4799 scope.go:117] "RemoveContainer" containerID="9fb2f8b69983f05d0604d71f9089713e10a905d07d55e57eae1c02ca0bb0ee5f" Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.073653 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68788854ff-l6zkp" Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.087526 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kdwcc" event={"ID":"fba342a8-536f-4c59-bb2c-44984e0a7fe0","Type":"ContainerStarted","Data":"cf96656d491643735efedcdba6d1259ab85b9e0ca667215ebb26d28b6aa3f623"} Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.302107 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68788854ff-l6zkp"] Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.302453 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-68788854ff-l6zkp"] Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.302475 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-h8tvt"] Jan 21 17:52:02 crc kubenswrapper[4799]: W0121 17:52:02.326300 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e9e3984_f73c_4a6d_8d5d_107481439374.slice/crio-741a4528ebe59a5bd70b7d8b76d2a4eff7bee55a5e2f36bfd59f6d007e087bb6 WatchSource:0}: Error finding container 741a4528ebe59a5bd70b7d8b76d2a4eff7bee55a5e2f36bfd59f6d007e087bb6: Status 404 returned error can't find the container with id 741a4528ebe59a5bd70b7d8b76d2a4eff7bee55a5e2f36bfd59f6d007e087bb6 Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.562429 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77f6cc8899-p9cjd"] Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.742456 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-vkhcg"] Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.750975 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:52:02 crc kubenswrapper[4799]: I0121 17:52:02.931233 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-847fc5fb45-sxffr"] Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.120449 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-h8tvt" event={"ID":"9e9e3984-f73c-4a6d-8d5d-107481439374","Type":"ContainerStarted","Data":"741a4528ebe59a5bd70b7d8b76d2a4eff7bee55a5e2f36bfd59f6d007e087bb6"} Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.125203 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kdwcc" event={"ID":"fba342a8-536f-4c59-bb2c-44984e0a7fe0","Type":"ContainerStarted","Data":"1cc27ab2ad3ef045292ec06cc1fa63b7684e9e05a018b5c43b21dafae099ee42"} Jan 21 17:52:03 crc kubenswrapper[4799]: 
I0121 17:52:03.139817 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-vkhcg" event={"ID":"d7930da5-46c0-4cc3-a63a-316aff9f5b3a","Type":"ContainerStarted","Data":"9c56305d5af43e8d9a3b4e61477139a3e810e8bc3bbce08275eed291aae83545"} Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.144988 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-kdwcc" podStartSLOduration=3.144965916 podStartE2EDuration="3.144965916s" podCreationTimestamp="2026-01-21 17:52:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:52:03.144815272 +0000 UTC m=+1149.771105295" watchObservedRunningTime="2026-01-21 17:52:03.144965916 +0000 UTC m=+1149.771255939" Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.150013 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-42vhg" event={"ID":"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2","Type":"ContainerStarted","Data":"3a209a83cb06172c4f8de2be0e4ba44d2dbbc26a8af1dd62cab91b68e57db928"} Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.153923 4799 generic.go:334] "Generic (PLEG): container finished" podID="3e51eac7-15ac-4cd2-bc33-8ce20b2e8525" containerID="4adb27ae150b5fcfe75388a0ad632f3e743754814d1fcb31f681dfb62ed9fa22" exitCode=0 Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.154106 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59787b855c-kz7lq" event={"ID":"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525","Type":"ContainerDied","Data":"4adb27ae150b5fcfe75388a0ad632f3e743754814d1fcb31f681dfb62ed9fa22"} Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.161913 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-847fc5fb45-sxffr" event={"ID":"99ff9621-8520-4d76-9db8-87bb562c6499","Type":"ContainerStarted","Data":"ca3d8a4f28a7554f0e8bde0e6357a5e1d22d13bf1b011e3af02bc96d19773084"} Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.163634 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4b8ffb2a-2019-41d5-a0fb-c05199bcc230","Type":"ContainerStarted","Data":"e1af4383698524337c3547b707a85d1d51d1d0a641eaff9cfdb9a909ae2490d9"} Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.177613 4799 generic.go:334] "Generic (PLEG): container finished" podID="ec158420-9dbf-4413-bd52-5041a9cee032" containerID="1450769535c3b6f9aa5cdfcf9617407249d96861431df10db7bd894c258ba4f0" exitCode=0 Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.177665 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" event={"ID":"ec158420-9dbf-4413-bd52-5041a9cee032","Type":"ContainerDied","Data":"1450769535c3b6f9aa5cdfcf9617407249d96861431df10db7bd894c258ba4f0"} Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.177691 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" event={"ID":"ec158420-9dbf-4413-bd52-5041a9cee032","Type":"ContainerStarted","Data":"c244a1761a75df522824f1e30ce504e8a06dfe5f72536f066c8e3415c36a5a82"} Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.219761 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-42vhg" podStartSLOduration=3.219738012 podStartE2EDuration="3.219738012s" podCreationTimestamp="2026-01-21 17:52:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:52:03.17616543 +0000 UTC m=+1149.802455463" watchObservedRunningTime="2026-01-21 17:52:03.219738012 +0000 UTC m=+1149.846028025" Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.626606 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.662332 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-dns-swift-storage-0\") pod \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.662749 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bq4ld\" (UniqueName: \"kubernetes.io/projected/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-kube-api-access-bq4ld\") pod \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.662920 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-config\") pod \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.663038 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-dns-svc\") pod \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.663144 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-ovsdbserver-sb\") pod \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.663177 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-ovsdbserver-nb\") pod \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\" (UID: \"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525\") " Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.683508 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-kube-api-access-bq4ld" (OuterVolumeSpecName: "kube-api-access-bq4ld") pod "3e51eac7-15ac-4cd2-bc33-8ce20b2e8525" (UID: "3e51eac7-15ac-4cd2-bc33-8ce20b2e8525"). InnerVolumeSpecName "kube-api-access-bq4ld". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.711206 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3e51eac7-15ac-4cd2-bc33-8ce20b2e8525" (UID: "3e51eac7-15ac-4cd2-bc33-8ce20b2e8525"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.723693 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3e51eac7-15ac-4cd2-bc33-8ce20b2e8525" (UID: "3e51eac7-15ac-4cd2-bc33-8ce20b2e8525"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.751717 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3e51eac7-15ac-4cd2-bc33-8ce20b2e8525" (UID: "3e51eac7-15ac-4cd2-bc33-8ce20b2e8525"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.752112 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3e51eac7-15ac-4cd2-bc33-8ce20b2e8525" (UID: "3e51eac7-15ac-4cd2-bc33-8ce20b2e8525"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.753583 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-config" (OuterVolumeSpecName: "config") pod "3e51eac7-15ac-4cd2-bc33-8ce20b2e8525" (UID: "3e51eac7-15ac-4cd2-bc33-8ce20b2e8525"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.768908 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.768942 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bq4ld\" (UniqueName: \"kubernetes.io/projected/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-kube-api-access-bq4ld\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.768955 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.768964 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.768973 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:03 crc kubenswrapper[4799]: I0121 17:52:03.768981 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.165083 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/horizon-74d95c7fcf-hmbcl"] Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.204262 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-645b97c799-l2dkc"] Jan 21 17:52:04 crc kubenswrapper[4799]: E0121 17:52:04.217495 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e51eac7-15ac-4cd2-bc33-8ce20b2e8525" containerName="init" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.217518 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e51eac7-15ac-4cd2-bc33-8ce20b2e8525" containerName="init" Jan 21 17:52:04 crc kubenswrapper[4799]: E0121 17:52:04.217543 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="485eaca1-f3f9-42ad-97f7-3b948cef32a0" containerName="init" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.217550 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="485eaca1-f3f9-42ad-97f7-3b948cef32a0" containerName="init" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.226789 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="485eaca1-f3f9-42ad-97f7-3b948cef32a0" containerName="init" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.226820 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e51eac7-15ac-4cd2-bc33-8ce20b2e8525" containerName="init" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.228755 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-645b97c799-l2dkc" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.254620 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59787b855c-kz7lq" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.288535 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="485eaca1-f3f9-42ad-97f7-3b948cef32a0" path="/var/lib/kubelet/pods/485eaca1-f3f9-42ad-97f7-3b948cef32a0/volumes" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.289295 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.289324 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.289347 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59787b855c-kz7lq" event={"ID":"3e51eac7-15ac-4cd2-bc33-8ce20b2e8525","Type":"ContainerDied","Data":"994a5f9b86a1d6dfb69efcf340d580e6c0805421c099b27e6bee297144334ca6"} Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.289372 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" event={"ID":"ec158420-9dbf-4413-bd52-5041a9cee032","Type":"ContainerStarted","Data":"fb039c711b87ae36fb240da128277b0f719a0e4e12486da552724ebd7f6923c4"} Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.289396 4799 scope.go:117] "RemoveContainer" containerID="4adb27ae150b5fcfe75388a0ad632f3e743754814d1fcb31f681dfb62ed9fa22" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.289535 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a9a6873a-570e-43e5-b185-3d5a645c59d8-horizon-secret-key\") pod \"horizon-645b97c799-l2dkc\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " pod="openstack/horizon-645b97c799-l2dkc" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.289783 
4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a9a6873a-570e-43e5-b185-3d5a645c59d8-config-data\") pod \"horizon-645b97c799-l2dkc\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " pod="openstack/horizon-645b97c799-l2dkc" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.289845 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xft2f\" (UniqueName: \"kubernetes.io/projected/a9a6873a-570e-43e5-b185-3d5a645c59d8-kube-api-access-xft2f\") pod \"horizon-645b97c799-l2dkc\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " pod="openstack/horizon-645b97c799-l2dkc" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.290113 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9a6873a-570e-43e5-b185-3d5a645c59d8-logs\") pod \"horizon-645b97c799-l2dkc\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " pod="openstack/horizon-645b97c799-l2dkc" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.290183 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9a6873a-570e-43e5-b185-3d5a645c59d8-scripts\") pod \"horizon-645b97c799-l2dkc\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " pod="openstack/horizon-645b97c799-l2dkc" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.303633 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-645b97c799-l2dkc"] Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.391894 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a9a6873a-570e-43e5-b185-3d5a645c59d8-config-data\") pod \"horizon-645b97c799-l2dkc\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " pod="openstack/horizon-645b97c799-l2dkc" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.391972 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xft2f\" (UniqueName: \"kubernetes.io/projected/a9a6873a-570e-43e5-b185-3d5a645c59d8-kube-api-access-xft2f\") pod \"horizon-645b97c799-l2dkc\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " pod="openstack/horizon-645b97c799-l2dkc" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.392264 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9a6873a-570e-43e5-b185-3d5a645c59d8-logs\") pod \"horizon-645b97c799-l2dkc\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " pod="openstack/horizon-645b97c799-l2dkc" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.392300 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9a6873a-570e-43e5-b185-3d5a645c59d8-scripts\") pod \"horizon-645b97c799-l2dkc\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " pod="openstack/horizon-645b97c799-l2dkc" Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.392342 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a9a6873a-570e-43e5-b185-3d5a645c59d8-horizon-secret-key\") pod \"horizon-645b97c799-l2dkc\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " pod="openstack/horizon-645b97c799-l2dkc" 
Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.394430 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9a6873a-570e-43e5-b185-3d5a645c59d8-scripts\") pod \"horizon-645b97c799-l2dkc\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " pod="openstack/horizon-645b97c799-l2dkc"
Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.394699 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9a6873a-570e-43e5-b185-3d5a645c59d8-logs\") pod \"horizon-645b97c799-l2dkc\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " pod="openstack/horizon-645b97c799-l2dkc"
Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.394702 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a9a6873a-570e-43e5-b185-3d5a645c59d8-config-data\") pod \"horizon-645b97c799-l2dkc\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " pod="openstack/horizon-645b97c799-l2dkc"
Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.406244 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a9a6873a-570e-43e5-b185-3d5a645c59d8-horizon-secret-key\") pod \"horizon-645b97c799-l2dkc\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " pod="openstack/horizon-645b97c799-l2dkc"
Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.423520 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xft2f\" (UniqueName: \"kubernetes.io/projected/a9a6873a-570e-43e5-b185-3d5a645c59d8-kube-api-access-xft2f\") pod \"horizon-645b97c799-l2dkc\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " pod="openstack/horizon-645b97c799-l2dkc"
Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.546456 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59787b855c-kz7lq"]
Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.558361 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59787b855c-kz7lq"]
Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.558862 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" podStartSLOduration=4.55884293 podStartE2EDuration="4.55884293s" podCreationTimestamp="2026-01-21 17:52:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:52:04.510984868 +0000 UTC m=+1151.137274901" watchObservedRunningTime="2026-01-21 17:52:04.55884293 +0000 UTC m=+1151.185132953"
Jan 21 17:52:04 crc kubenswrapper[4799]: I0121 17:52:04.571075 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-645b97c799-l2dkc"
Jan 21 17:52:05 crc kubenswrapper[4799]: I0121 17:52:05.260931 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-645b97c799-l2dkc"]
Jan 21 17:52:05 crc kubenswrapper[4799]: W0121 17:52:05.300689 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9a6873a_570e_43e5_b185_3d5a645c59d8.slice/crio-46d7a78057efda9b20a9c73374575af33c283ea6caece5aa6aa817ae9368ad6b WatchSource:0}: Error finding container 46d7a78057efda9b20a9c73374575af33c283ea6caece5aa6aa817ae9368ad6b: Status 404 returned error can't find the container with id 46d7a78057efda9b20a9c73374575af33c283ea6caece5aa6aa817ae9368ad6b
Jan 21 17:52:06 crc kubenswrapper[4799]: I0121 17:52:06.231006 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e51eac7-15ac-4cd2-bc33-8ce20b2e8525" path="/var/lib/kubelet/pods/3e51eac7-15ac-4cd2-bc33-8ce20b2e8525/volumes"
Jan 21 17:52:06 crc kubenswrapper[4799]: I0121 17:52:06.308009 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-645b97c799-l2dkc" event={"ID":"a9a6873a-570e-43e5-b185-3d5a645c59d8","Type":"ContainerStarted","Data":"46d7a78057efda9b20a9c73374575af33c283ea6caece5aa6aa817ae9368ad6b"}
Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.716926 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-847fc5fb45-sxffr"]
Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.771106 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7d9c7df8bb-b2r9b"]
Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.773115 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7d9c7df8bb-b2r9b"
Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.797645 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc"
Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.807571 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7d9c7df8bb-b2r9b"]
Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.840682 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-645b97c799-l2dkc"]
Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.846053 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fde84d23-f64f-4299-af94-1d29894acdc0-scripts\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b"
Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.846114 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fde84d23-f64f-4299-af94-1d29894acdc0-logs\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b"
Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.846189 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-horizon-tls-certs\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b"
Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.846234 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-horizon-secret-key\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b"
Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.846272 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9st5k\" (UniqueName: \"kubernetes.io/projected/fde84d23-f64f-4299-af94-1d29894acdc0-kube-api-access-9st5k\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b"
Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.846327 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fde84d23-f64f-4299-af94-1d29894acdc0-config-data\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b"
Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.846345 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-combined-ca-bundle\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b"
Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.880396 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-585ff694b6-5fph4"]
Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.891761 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-585ff694b6-5fph4"]
Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.891854 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.948351 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9st5k\" (UniqueName: \"kubernetes.io/projected/fde84d23-f64f-4299-af94-1d29894acdc0-kube-api-access-9st5k\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.948422 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8391139-71cc-48bb-af31-242cebaea8de-combined-ca-bundle\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.948474 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fde84d23-f64f-4299-af94-1d29894acdc0-config-data\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.948515 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-combined-ca-bundle\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.948553 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8391139-71cc-48bb-af31-242cebaea8de-horizon-tls-certs\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.948582 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxfrg\" (UniqueName: \"kubernetes.io/projected/b8391139-71cc-48bb-af31-242cebaea8de-kube-api-access-hxfrg\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.948610 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b8391139-71cc-48bb-af31-242cebaea8de-horizon-secret-key\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.948638 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fde84d23-f64f-4299-af94-1d29894acdc0-scripts\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.948666 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fde84d23-f64f-4299-af94-1d29894acdc0-logs\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " 
pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.948690 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b8391139-71cc-48bb-af31-242cebaea8de-config-data\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.948711 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8391139-71cc-48bb-af31-242cebaea8de-logs\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.948736 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-horizon-tls-certs\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.948770 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-horizon-secret-key\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.948798 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b8391139-71cc-48bb-af31-242cebaea8de-scripts\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.950730 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fde84d23-f64f-4299-af94-1d29894acdc0-scripts\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.951006 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fde84d23-f64f-4299-af94-1d29894acdc0-logs\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.955189 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fde84d23-f64f-4299-af94-1d29894acdc0-config-data\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.955604 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-horizon-tls-certs\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.960323 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-combined-ca-bundle\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.963446 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-horizon-secret-key\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:09 crc kubenswrapper[4799]: I0121 17:52:09.979955 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9st5k\" (UniqueName: \"kubernetes.io/projected/fde84d23-f64f-4299-af94-1d29894acdc0-kube-api-access-9st5k\") pod \"horizon-7d9c7df8bb-b2r9b\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:10 crc kubenswrapper[4799]: I0121 17:52:10.051408 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b8391139-71cc-48bb-af31-242cebaea8de-config-data\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:10 crc kubenswrapper[4799]: I0121 17:52:10.051484 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8391139-71cc-48bb-af31-242cebaea8de-logs\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:10 crc kubenswrapper[4799]: I0121 17:52:10.052947 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8391139-71cc-48bb-af31-242cebaea8de-logs\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:10 crc kubenswrapper[4799]: I0121 17:52:10.053063 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b8391139-71cc-48bb-af31-242cebaea8de-config-data\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:10 crc kubenswrapper[4799]: I0121 17:52:10.053215 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b8391139-71cc-48bb-af31-242cebaea8de-scripts\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:10 crc kubenswrapper[4799]: I0121 17:52:10.053278 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8391139-71cc-48bb-af31-242cebaea8de-combined-ca-bundle\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:10 crc kubenswrapper[4799]: I0121 17:52:10.053366 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8391139-71cc-48bb-af31-242cebaea8de-horizon-tls-certs\") pod \"horizon-585ff694b6-5fph4\" (UID: 
\"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:10 crc kubenswrapper[4799]: I0121 17:52:10.053400 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxfrg\" (UniqueName: \"kubernetes.io/projected/b8391139-71cc-48bb-af31-242cebaea8de-kube-api-access-hxfrg\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:10 crc kubenswrapper[4799]: I0121 17:52:10.053433 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b8391139-71cc-48bb-af31-242cebaea8de-horizon-secret-key\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:10 crc kubenswrapper[4799]: I0121 17:52:10.054685 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b8391139-71cc-48bb-af31-242cebaea8de-scripts\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:10 crc kubenswrapper[4799]: I0121 17:52:10.058980 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b8391139-71cc-48bb-af31-242cebaea8de-horizon-secret-key\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:10 crc kubenswrapper[4799]: I0121 17:52:10.072918 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8391139-71cc-48bb-af31-242cebaea8de-combined-ca-bundle\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:10 crc kubenswrapper[4799]: I0121 17:52:10.074318 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8391139-71cc-48bb-af31-242cebaea8de-horizon-tls-certs\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:10 crc kubenswrapper[4799]: I0121 17:52:10.074903 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxfrg\" (UniqueName: \"kubernetes.io/projected/b8391139-71cc-48bb-af31-242cebaea8de-kube-api-access-hxfrg\") pod \"horizon-585ff694b6-5fph4\" (UID: \"b8391139-71cc-48bb-af31-242cebaea8de\") " pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:10 crc kubenswrapper[4799]: I0121 17:52:10.112672 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:10 crc kubenswrapper[4799]: I0121 17:52:10.232002 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:11 crc kubenswrapper[4799]: I0121 17:52:11.373285 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:52:11 crc kubenswrapper[4799]: I0121 17:52:11.477512 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-648b6fc9cc-db49n"] Jan 21 17:52:11 crc kubenswrapper[4799]: I0121 17:52:11.477786 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" podUID="6df31e83-1f86-4266-be9f-7e8dbfd25922" containerName="dnsmasq-dns" containerID="cri-o://a7dfc94a5d699e7957cc86850a5491be3a74a7e79229572171d019076121f11c" gracePeriod=10 Jan 21 17:52:12 crc kubenswrapper[4799]: I0121 17:52:12.432945 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" podUID="6df31e83-1f86-4266-be9f-7e8dbfd25922" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: connect: connection refused" Jan 21 17:52:12 crc kubenswrapper[4799]: I0121 17:52:12.435854 4799 generic.go:334] "Generic (PLEG): container finished" podID="6fdeb1d8-5803-47be-8625-68aeaf6a8ef2" containerID="3a209a83cb06172c4f8de2be0e4ba44d2dbbc26a8af1dd62cab91b68e57db928" exitCode=0 Jan 21 17:52:12 crc kubenswrapper[4799]: I0121 17:52:12.435981 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-42vhg" event={"ID":"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2","Type":"ContainerDied","Data":"3a209a83cb06172c4f8de2be0e4ba44d2dbbc26a8af1dd62cab91b68e57db928"} Jan 21 17:52:12 crc kubenswrapper[4799]: I0121 17:52:12.441626 4799 generic.go:334] "Generic (PLEG): container finished" podID="6df31e83-1f86-4266-be9f-7e8dbfd25922" containerID="a7dfc94a5d699e7957cc86850a5491be3a74a7e79229572171d019076121f11c" exitCode=0 Jan 21 17:52:12 crc kubenswrapper[4799]: I0121 17:52:12.441696 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" event={"ID":"6df31e83-1f86-4266-be9f-7e8dbfd25922","Type":"ContainerDied","Data":"a7dfc94a5d699e7957cc86850a5491be3a74a7e79229572171d019076121f11c"} Jan 21 17:52:21 crc kubenswrapper[4799]: E0121 17:52:21.363820 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-horizon:watcher_latest" Jan 21 17:52:21 crc kubenswrapper[4799]: E0121 17:52:21.364686 4799 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-horizon:watcher_latest" Jan 21 17:52:21 crc kubenswrapper[4799]: E0121 17:52:21.364871 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:38.102.83.30:5001/podified-master-centos10/openstack-horizon:watcher_latest,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nb9h5cfh678h58fh97h664h645h6h5c7h76h5dh679h678h96h5c7h64ch8h686hc9h694hdh5c9h54h5bfh648h688h55bhbbhb8h59bh5dbhfcq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:yes,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t28nv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-847fc5fb45-sxffr_openstack(99ff9621-8520-4d76-9db8-87bb562c6499): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 17:52:21 crc kubenswrapper[4799]: E0121 17:52:21.376781 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.30:5001/podified-master-centos10/openstack-horizon:watcher_latest\\\"\"]" pod="openstack/horizon-847fc5fb45-sxffr" podUID="99ff9621-8520-4d76-9db8-87bb562c6499" Jan 21 17:52:21 crc kubenswrapper[4799]: E0121 17:52:21.400649 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-horizon:watcher_latest" Jan 21 17:52:21 crc kubenswrapper[4799]: E0121 17:52:21.400720 4799 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-horizon:watcher_latest" Jan 21 17:52:21 crc kubenswrapper[4799]: E0121 17:52:21.400937 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:38.102.83.30:5001/podified-master-centos10/openstack-horizon:watcher_latest,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n685h5bdh5fdh556h5f9hb4h56fh68h56bh5b5h65dh59fh9hffh76h68dhdh84h596h64dh698h695hdch8h94h575hf5hf6h575h56fh56ch67bq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:yes,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xft2f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-645b97c799-l2dkc_openstack(a9a6873a-570e-43e5-b185-3d5a645c59d8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 17:52:21 crc kubenswrapper[4799]: E0121 17:52:21.403177 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.30:5001/podified-master-centos10/openstack-horizon:watcher_latest\\\"\"]" pod="openstack/horizon-645b97c799-l2dkc" podUID="a9a6873a-570e-43e5-b185-3d5a645c59d8" Jan 21 17:52:22 crc kubenswrapper[4799]: I0121 17:52:22.434267 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" podUID="6df31e83-1f86-4266-be9f-7e8dbfd25922" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: i/o timeout" Jan 21 17:52:27 crc kubenswrapper[4799]: I0121 17:52:27.434583 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" podUID="6df31e83-1f86-4266-be9f-7e8dbfd25922" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: i/o timeout" Jan 21 17:52:27 crc kubenswrapper[4799]: I0121 17:52:27.435460 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:52:28 crc kubenswrapper[4799]: E0121 17:52:28.990962 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-horizon:watcher_latest" Jan 21 
17:52:28 crc kubenswrapper[4799]: E0121 17:52:28.991169 4799 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-horizon:watcher_latest" Jan 21 17:52:28 crc kubenswrapper[4799]: E0121 17:52:28.991345 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:38.102.83.30:5001/podified-master-centos10/openstack-horizon:watcher_latest,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n549h686h65dh649h555h67dh7fh5b4h5cdh678h54fh68fhbch5dh78h557hc7h646h74hf5h68h565h647h55dh5fbh544h57dhb8h65ch5f9h695h687q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:yes,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k9rg9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-74d95c7fcf-hmbcl_openstack(534362cc-8f86-49f5-95af-2027f8f64b0a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 21 17:52:28 crc kubenswrapper[4799]: E0121 17:52:28.998296 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.30:5001/podified-master-centos10/openstack-horizon:watcher_latest\\\"\"]" pod="openstack/horizon-74d95c7fcf-hmbcl" podUID="534362cc-8f86-49f5-95af-2027f8f64b0a" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.122200 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.126983 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.132984 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-847fc5fb45-sxffr" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.139486 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-645b97c799-l2dkc" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207172 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xft2f\" (UniqueName: \"kubernetes.io/projected/a9a6873a-570e-43e5-b185-3d5a645c59d8-kube-api-access-xft2f\") pod \"a9a6873a-570e-43e5-b185-3d5a645c59d8\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207232 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t28nv\" (UniqueName: \"kubernetes.io/projected/99ff9621-8520-4d76-9db8-87bb562c6499-kube-api-access-t28nv\") pod \"99ff9621-8520-4d76-9db8-87bb562c6499\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207292 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-config\") pod \"6df31e83-1f86-4266-be9f-7e8dbfd25922\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207344 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6q5xj\" (UniqueName: \"kubernetes.io/projected/6df31e83-1f86-4266-be9f-7e8dbfd25922-kube-api-access-6q5xj\") pod \"6df31e83-1f86-4266-be9f-7e8dbfd25922\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207371 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-fernet-keys\") pod \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207421 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99ff9621-8520-4d76-9db8-87bb562c6499-logs\") pod \"99ff9621-8520-4d76-9db8-87bb562c6499\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207465 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/99ff9621-8520-4d76-9db8-87bb562c6499-horizon-secret-key\") pod \"99ff9621-8520-4d76-9db8-87bb562c6499\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207494 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9a6873a-570e-43e5-b185-3d5a645c59d8-scripts\") pod \"a9a6873a-570e-43e5-b185-3d5a645c59d8\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207515 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-ovsdbserver-nb\") pod \"6df31e83-1f86-4266-be9f-7e8dbfd25922\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207540 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a9a6873a-570e-43e5-b185-3d5a645c59d8-horizon-secret-key\") pod \"a9a6873a-570e-43e5-b185-3d5a645c59d8\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207593 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92j4q\" (UniqueName: \"kubernetes.io/projected/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-kube-api-access-92j4q\") pod \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207627 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a9a6873a-570e-43e5-b185-3d5a645c59d8-config-data\") pod \"a9a6873a-570e-43e5-b185-3d5a645c59d8\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207662 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-dns-svc\") pod \"6df31e83-1f86-4266-be9f-7e8dbfd25922\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207711 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9a6873a-570e-43e5-b185-3d5a645c59d8-logs\") pod \"a9a6873a-570e-43e5-b185-3d5a645c59d8\" (UID: \"a9a6873a-570e-43e5-b185-3d5a645c59d8\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207733 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-config-data\") pod \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207762 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-credential-keys\") pod \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207789 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/99ff9621-8520-4d76-9db8-87bb562c6499-scripts\") pod \"99ff9621-8520-4d76-9db8-87bb562c6499\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207821 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-scripts\") pod \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207869 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/99ff9621-8520-4d76-9db8-87bb562c6499-config-data\") pod \"99ff9621-8520-4d76-9db8-87bb562c6499\" (UID: \"99ff9621-8520-4d76-9db8-87bb562c6499\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207897 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-combined-ca-bundle\") pod \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\" (UID: \"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.207941 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-ovsdbserver-sb\") pod \"6df31e83-1f86-4266-be9f-7e8dbfd25922\" (UID: \"6df31e83-1f86-4266-be9f-7e8dbfd25922\") " Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.209242 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99ff9621-8520-4d76-9db8-87bb562c6499-logs" (OuterVolumeSpecName: "logs") pod "99ff9621-8520-4d76-9db8-87bb562c6499" (UID: "99ff9621-8520-4d76-9db8-87bb562c6499"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.210868 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9a6873a-570e-43e5-b185-3d5a645c59d8-config-data" (OuterVolumeSpecName: "config-data") pod "a9a6873a-570e-43e5-b185-3d5a645c59d8" (UID: "a9a6873a-570e-43e5-b185-3d5a645c59d8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.211945 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9a6873a-570e-43e5-b185-3d5a645c59d8-kube-api-access-xft2f" (OuterVolumeSpecName: "kube-api-access-xft2f") pod "a9a6873a-570e-43e5-b185-3d5a645c59d8" (UID: "a9a6873a-570e-43e5-b185-3d5a645c59d8"). InnerVolumeSpecName "kube-api-access-xft2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.215340 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9a6873a-570e-43e5-b185-3d5a645c59d8-logs" (OuterVolumeSpecName: "logs") pod "a9a6873a-570e-43e5-b185-3d5a645c59d8" (UID: "a9a6873a-570e-43e5-b185-3d5a645c59d8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.216067 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99ff9621-8520-4d76-9db8-87bb562c6499-config-data" (OuterVolumeSpecName: "config-data") pod "99ff9621-8520-4d76-9db8-87bb562c6499" (UID: "99ff9621-8520-4d76-9db8-87bb562c6499"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.216256 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "6fdeb1d8-5803-47be-8625-68aeaf6a8ef2" (UID: "6fdeb1d8-5803-47be-8625-68aeaf6a8ef2"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.216247 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9a6873a-570e-43e5-b185-3d5a645c59d8-scripts" (OuterVolumeSpecName: "scripts") pod "a9a6873a-570e-43e5-b185-3d5a645c59d8" (UID: "a9a6873a-570e-43e5-b185-3d5a645c59d8"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.216408 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99ff9621-8520-4d76-9db8-87bb562c6499-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "99ff9621-8520-4d76-9db8-87bb562c6499" (UID: "99ff9621-8520-4d76-9db8-87bb562c6499"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.216410 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-kube-api-access-92j4q" (OuterVolumeSpecName: "kube-api-access-92j4q") pod "6fdeb1d8-5803-47be-8625-68aeaf6a8ef2" (UID: "6fdeb1d8-5803-47be-8625-68aeaf6a8ef2"). InnerVolumeSpecName "kube-api-access-92j4q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.223571 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99ff9621-8520-4d76-9db8-87bb562c6499-kube-api-access-t28nv" (OuterVolumeSpecName: "kube-api-access-t28nv") pod "99ff9621-8520-4d76-9db8-87bb562c6499" (UID: "99ff9621-8520-4d76-9db8-87bb562c6499"). InnerVolumeSpecName "kube-api-access-t28nv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.223747 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-scripts" (OuterVolumeSpecName: "scripts") pod "6fdeb1d8-5803-47be-8625-68aeaf6a8ef2" (UID: "6fdeb1d8-5803-47be-8625-68aeaf6a8ef2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.237076 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "6fdeb1d8-5803-47be-8625-68aeaf6a8ef2" (UID: "6fdeb1d8-5803-47be-8625-68aeaf6a8ef2"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.237432 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99ff9621-8520-4d76-9db8-87bb562c6499-scripts" (OuterVolumeSpecName: "scripts") pod "99ff9621-8520-4d76-9db8-87bb562c6499" (UID: "99ff9621-8520-4d76-9db8-87bb562c6499"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.237498 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9a6873a-570e-43e5-b185-3d5a645c59d8-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a9a6873a-570e-43e5-b185-3d5a645c59d8" (UID: "a9a6873a-570e-43e5-b185-3d5a645c59d8"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.263454 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6df31e83-1f86-4266-be9f-7e8dbfd25922-kube-api-access-6q5xj" (OuterVolumeSpecName: "kube-api-access-6q5xj") pod "6df31e83-1f86-4266-be9f-7e8dbfd25922" (UID: "6df31e83-1f86-4266-be9f-7e8dbfd25922"). InnerVolumeSpecName "kube-api-access-6q5xj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.279176 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6df31e83-1f86-4266-be9f-7e8dbfd25922" (UID: "6df31e83-1f86-4266-be9f-7e8dbfd25922"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.288584 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6fdeb1d8-5803-47be-8625-68aeaf6a8ef2" (UID: "6fdeb1d8-5803-47be-8625-68aeaf6a8ef2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.289520 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-config-data" (OuterVolumeSpecName: "config-data") pod "6fdeb1d8-5803-47be-8625-68aeaf6a8ef2" (UID: "6fdeb1d8-5803-47be-8625-68aeaf6a8ef2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.308895 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-config" (OuterVolumeSpecName: "config") pod "6df31e83-1f86-4266-be9f-7e8dbfd25922" (UID: "6df31e83-1f86-4266-be9f-7e8dbfd25922"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310402 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99ff9621-8520-4d76-9db8-87bb562c6499-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310425 4799 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/99ff9621-8520-4d76-9db8-87bb562c6499-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310436 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9a6873a-570e-43e5-b185-3d5a645c59d8-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310445 4799 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a9a6873a-570e-43e5-b185-3d5a645c59d8-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310453 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92j4q\" (UniqueName: \"kubernetes.io/projected/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-kube-api-access-92j4q\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310461 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a9a6873a-570e-43e5-b185-3d5a645c59d8-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310468 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/a9a6873a-570e-43e5-b185-3d5a645c59d8-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310476 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310483 4799 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310491 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/99ff9621-8520-4d76-9db8-87bb562c6499-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310498 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310505 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/99ff9621-8520-4d76-9db8-87bb562c6499-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310512 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310521 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310529 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xft2f\" (UniqueName: \"kubernetes.io/projected/a9a6873a-570e-43e5-b185-3d5a645c59d8-kube-api-access-xft2f\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310537 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t28nv\" (UniqueName: \"kubernetes.io/projected/99ff9621-8520-4d76-9db8-87bb562c6499-kube-api-access-t28nv\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310547 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310555 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6q5xj\" (UniqueName: \"kubernetes.io/projected/6df31e83-1f86-4266-be9f-7e8dbfd25922-kube-api-access-6q5xj\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.310562 4799 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.314865 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") 
pod "6df31e83-1f86-4266-be9f-7e8dbfd25922" (UID: "6df31e83-1f86-4266-be9f-7e8dbfd25922"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.315919 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6df31e83-1f86-4266-be9f-7e8dbfd25922" (UID: "6df31e83-1f86-4266-be9f-7e8dbfd25922"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.412412 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.412446 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6df31e83-1f86-4266-be9f-7e8dbfd25922-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.594636 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-42vhg" event={"ID":"6fdeb1d8-5803-47be-8625-68aeaf6a8ef2","Type":"ContainerDied","Data":"bff3d2b416977d0b9f17726d2052e772d5ade951784a5a87840fbb1a77ba4ad3"} Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.594686 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bff3d2b416977d0b9f17726d2052e772d5ade951784a5a87840fbb1a77ba4ad3" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.594659 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-42vhg" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.596415 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-847fc5fb45-sxffr" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.596431 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-847fc5fb45-sxffr" event={"ID":"99ff9621-8520-4d76-9db8-87bb562c6499","Type":"ContainerDied","Data":"ca3d8a4f28a7554f0e8bde0e6357a5e1d22d13bf1b011e3af02bc96d19773084"} Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.599307 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-645b97c799-l2dkc" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.599326 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-645b97c799-l2dkc" event={"ID":"a9a6873a-570e-43e5-b185-3d5a645c59d8","Type":"ContainerDied","Data":"46d7a78057efda9b20a9c73374575af33c283ea6caece5aa6aa817ae9368ad6b"} Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.607951 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" event={"ID":"6df31e83-1f86-4266-be9f-7e8dbfd25922","Type":"ContainerDied","Data":"e5290dcdcab23dc63be0e09fbdd695b3a75d071b8d0dbe882deefd5fbdc7a4cb"} Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.608037 4799 scope.go:117] "RemoveContainer" containerID="a7dfc94a5d699e7957cc86850a5491be3a74a7e79229572171d019076121f11c" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.607971 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.715670 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-847fc5fb45-sxffr"] Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.746788 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-847fc5fb45-sxffr"] Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.798365 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-645b97c799-l2dkc"] Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.815999 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-645b97c799-l2dkc"] Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.828373 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-648b6fc9cc-db49n"] Jan 21 17:52:29 crc kubenswrapper[4799]: I0121 17:52:29.837358 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-648b6fc9cc-db49n"] Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.221393 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6df31e83-1f86-4266-be9f-7e8dbfd25922" path="/var/lib/kubelet/pods/6df31e83-1f86-4266-be9f-7e8dbfd25922/volumes" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.222822 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99ff9621-8520-4d76-9db8-87bb562c6499" path="/var/lib/kubelet/pods/99ff9621-8520-4d76-9db8-87bb562c6499/volumes" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.223539 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9a6873a-570e-43e5-b185-3d5a645c59d8" path="/var/lib/kubelet/pods/a9a6873a-570e-43e5-b185-3d5a645c59d8/volumes" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.312074 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-42vhg"] Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.321279 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-42vhg"] Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.413116 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-q4lsr"] Jan 21 17:52:30 crc kubenswrapper[4799]: E0121 17:52:30.413640 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6df31e83-1f86-4266-be9f-7e8dbfd25922" containerName="init" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.413665 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6df31e83-1f86-4266-be9f-7e8dbfd25922" containerName="init" Jan 21 17:52:30 crc kubenswrapper[4799]: E0121 17:52:30.413687 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fdeb1d8-5803-47be-8625-68aeaf6a8ef2" containerName="keystone-bootstrap" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.413694 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fdeb1d8-5803-47be-8625-68aeaf6a8ef2" containerName="keystone-bootstrap" Jan 21 17:52:30 crc kubenswrapper[4799]: E0121 17:52:30.413709 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6df31e83-1f86-4266-be9f-7e8dbfd25922" containerName="dnsmasq-dns" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.413717 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6df31e83-1f86-4266-be9f-7e8dbfd25922" containerName="dnsmasq-dns" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.414002 4799 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="6fdeb1d8-5803-47be-8625-68aeaf6a8ef2" containerName="keystone-bootstrap" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.414033 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6df31e83-1f86-4266-be9f-7e8dbfd25922" containerName="dnsmasq-dns" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.414817 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-q4lsr" Jan 21 17:52:30 crc kubenswrapper[4799]: E0121 17:52:30.419957 4799 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-cinder-api:watcher_latest" Jan 21 17:52:30 crc kubenswrapper[4799]: E0121 17:52:30.419993 4799 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.30:5001/podified-master-centos10/openstack-cinder-api:watcher_latest" Jan 21 17:52:30 crc kubenswrapper[4799]: E0121 17:52:30.420110 4799 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:38.102.83.30:5001/podified-master-centos10/openstack-cinder-api:watcher_latest,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f8gsc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.423757 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.424540 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.424676 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.425190 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-phg79"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.426144 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.436121 4799 scope.go:117] "RemoveContainer" containerID="72125dca36af8bb1595f9e481e26db8856da0c5ddea192593eebbc6397381ffa"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.441255 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-config-data\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.441327 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-fernet-keys\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.441373 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfjw9\" (UniqueName: \"kubernetes.io/projected/4c14e317-6824-489e-9c4a-e7cf337c2439-kube-api-access-rfjw9\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.441427 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-scripts\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.441468 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-combined-ca-bundle\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.441498 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-credential-keys\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.459844 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-q4lsr"]
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.544214 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-scripts\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.544377 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-combined-ca-bundle\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.544460 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-credential-keys\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.544549 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-config-data\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.544630 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-fernet-keys\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.544851 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfjw9\" (UniqueName: \"kubernetes.io/projected/4c14e317-6824-489e-9c4a-e7cf337c2439-kube-api-access-rfjw9\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.548168 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-scripts\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.549471 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-credential-keys\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.551505 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-combined-ca-bundle\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.556495 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-config-data\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.559684 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-fernet-keys\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.563761 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfjw9\" (UniqueName: \"kubernetes.io/projected/4c14e317-6824-489e-9c4a-e7cf337c2439-kube-api-access-rfjw9\") pod \"keystone-bootstrap-q4lsr\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " pod="openstack/keystone-bootstrap-q4lsr"
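Annotation (not part of the log): each volume of keystone-bootstrap-q4lsr walks through VerifyControllerAttachedVolume, then MountVolume started, then MountVolume.SetUp succeeded, and the result is materialized under the per-pod directory that the earlier "Cleaned up orphaned pod volumes dir" lines delete. The path layout below is inferred from the paths visible in this log; the plugin-name escaping is an assumption.

    package main

    import (
    	"fmt"
    	"path/filepath"
    	"strings"
    )

    // podVolumeDir builds the per-pod volume path kubelet uses, e.g.
    // /var/lib/kubelet/pods/<uid>/volumes/kubernetes.io~secret/<name>.
    func podVolumeDir(podUID, pluginName, volumeName string) string {
    	escaped := strings.ReplaceAll(pluginName, "/", "~") // assumed escaping
    	return filepath.Join("/var/lib/kubelet/pods", podUID, "volumes", escaped, volumeName)
    }

    func main() {
    	fmt.Println(podVolumeDir("4c14e317-6824-489e-9c4a-e7cf337c2439",
    		"kubernetes.io/secret", "fernet-keys"))
    }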
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.622246 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-74d95c7fcf-hmbcl" event={"ID":"534362cc-8f86-49f5-95af-2027f8f64b0a","Type":"ContainerDied","Data":"5e4674308dff9469c0d6f1ab11bc86f7d92cfe5e0f51e4dbe94011d3c17a138d"}
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.622292 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e4674308dff9469c0d6f1ab11bc86f7d92cfe5e0f51e4dbe94011d3c17a138d"
Jan 21 17:52:30 crc kubenswrapper[4799]: E0121 17:52:30.627119 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.30:5001/podified-master-centos10/openstack-cinder-api:watcher_latest\\\"\"" pod="openstack/cinder-db-sync-s94pl" podUID="45457092-3e80-4528-99f1-b1f5f1c2f128"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.743260 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-q4lsr"
Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.743402 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-74d95c7fcf-hmbcl"
Need to start a new one" pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.854324 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9rg9\" (UniqueName: \"kubernetes.io/projected/534362cc-8f86-49f5-95af-2027f8f64b0a-kube-api-access-k9rg9\") pod \"534362cc-8f86-49f5-95af-2027f8f64b0a\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.854447 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/534362cc-8f86-49f5-95af-2027f8f64b0a-config-data\") pod \"534362cc-8f86-49f5-95af-2027f8f64b0a\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.854515 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/534362cc-8f86-49f5-95af-2027f8f64b0a-horizon-secret-key\") pod \"534362cc-8f86-49f5-95af-2027f8f64b0a\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.854616 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/534362cc-8f86-49f5-95af-2027f8f64b0a-scripts\") pod \"534362cc-8f86-49f5-95af-2027f8f64b0a\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.854663 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/534362cc-8f86-49f5-95af-2027f8f64b0a-logs\") pod \"534362cc-8f86-49f5-95af-2027f8f64b0a\" (UID: \"534362cc-8f86-49f5-95af-2027f8f64b0a\") " Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.863779 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/534362cc-8f86-49f5-95af-2027f8f64b0a-scripts" (OuterVolumeSpecName: "scripts") pod "534362cc-8f86-49f5-95af-2027f8f64b0a" (UID: "534362cc-8f86-49f5-95af-2027f8f64b0a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.863984 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/534362cc-8f86-49f5-95af-2027f8f64b0a-config-data" (OuterVolumeSpecName: "config-data") pod "534362cc-8f86-49f5-95af-2027f8f64b0a" (UID: "534362cc-8f86-49f5-95af-2027f8f64b0a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.864383 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/534362cc-8f86-49f5-95af-2027f8f64b0a-logs" (OuterVolumeSpecName: "logs") pod "534362cc-8f86-49f5-95af-2027f8f64b0a" (UID: "534362cc-8f86-49f5-95af-2027f8f64b0a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.866477 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/534362cc-8f86-49f5-95af-2027f8f64b0a-kube-api-access-k9rg9" (OuterVolumeSpecName: "kube-api-access-k9rg9") pod "534362cc-8f86-49f5-95af-2027f8f64b0a" (UID: "534362cc-8f86-49f5-95af-2027f8f64b0a"). InnerVolumeSpecName "kube-api-access-k9rg9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.868469 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/534362cc-8f86-49f5-95af-2027f8f64b0a-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "534362cc-8f86-49f5-95af-2027f8f64b0a" (UID: "534362cc-8f86-49f5-95af-2027f8f64b0a"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.924637 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7d9c7df8bb-b2r9b"] Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.957270 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/534362cc-8f86-49f5-95af-2027f8f64b0a-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.957306 4799 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/534362cc-8f86-49f5-95af-2027f8f64b0a-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.957319 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/534362cc-8f86-49f5-95af-2027f8f64b0a-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.957329 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/534362cc-8f86-49f5-95af-2027f8f64b0a-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.957337 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9rg9\" (UniqueName: \"kubernetes.io/projected/534362cc-8f86-49f5-95af-2027f8f64b0a-kube-api-access-k9rg9\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:30 crc kubenswrapper[4799]: I0121 17:52:30.973936 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-585ff694b6-5fph4"] Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.257080 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-q4lsr"] Jan 21 17:52:31 crc kubenswrapper[4799]: W0121 17:52:31.266931 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4c14e317_6824_489e_9c4a_e7cf337c2439.slice/crio-0ea1c293e682d703f31763559772656da1738dcda7f04787cf387621d889d2a7 WatchSource:0}: Error finding container 0ea1c293e682d703f31763559772656da1738dcda7f04787cf387621d889d2a7: Status 404 returned error can't find the container with id 0ea1c293e682d703f31763559772656da1738dcda7f04787cf387621d889d2a7 Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.641497 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q4lsr" event={"ID":"4c14e317-6824-489e-9c4a-e7cf337c2439","Type":"ContainerStarted","Data":"0ea1c293e682d703f31763559772656da1738dcda7f04787cf387621d889d2a7"} Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.656095 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-vkhcg" event={"ID":"d7930da5-46c0-4cc3-a63a-316aff9f5b3a","Type":"ContainerStarted","Data":"45f86041843fab27b22ec70bb3c93e872b5440d034d491e2aab2734943be1c66"} Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.664932 4799 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4b8ffb2a-2019-41d5-a0fb-c05199bcc230","Type":"ContainerStarted","Data":"ab1356f09185e5bcd03fc2bf91986077d64da5b25ade94ba4110f03988ec963f"} Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.677942 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-h8tvt" event={"ID":"9e9e3984-f73c-4a6d-8d5d-107481439374","Type":"ContainerStarted","Data":"22e51f92a929b9cd5d7e5262c57ad52cae081aac91b8a43a7ac2bf2cd078314c"} Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.679990 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-vkhcg" podStartSLOduration=4.014765801 podStartE2EDuration="31.679975419s" podCreationTimestamp="2026-01-21 17:52:00 +0000 UTC" firstStartedPulling="2026-01-21 17:52:02.754638834 +0000 UTC m=+1149.380928857" lastFinishedPulling="2026-01-21 17:52:30.419848452 +0000 UTC m=+1177.046138475" observedRunningTime="2026-01-21 17:52:31.678201609 +0000 UTC m=+1178.304491632" watchObservedRunningTime="2026-01-21 17:52:31.679975419 +0000 UTC m=+1178.306265442" Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.687409 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-74m2t" event={"ID":"482b08ae-060f-465a-9085-20d742c22a13","Type":"ContainerStarted","Data":"63f960c35f097c4f3ff07c7a6040984afd5aaa1799cd3cbad02d5e2b724834d1"} Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.689447 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-6zwqt" event={"ID":"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd","Type":"ContainerStarted","Data":"e3da8b23a9adbcf3871c602e691f4eeca53327e4701a0d57d505521c4ed93547"} Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.691616 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d9c7df8bb-b2r9b" event={"ID":"fde84d23-f64f-4299-af94-1d29894acdc0","Type":"ContainerStarted","Data":"11f21f7e5deaa70ee0d77740377532b2349b1405cb6eb0d5c203aacc4806a2a7"} Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.691644 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d9c7df8bb-b2r9b" event={"ID":"fde84d23-f64f-4299-af94-1d29894acdc0","Type":"ContainerStarted","Data":"779e83bd8e5768e28f3d74bd9085d854a207a29d6c1165c9c7a49b899c0dd19a"} Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.692548 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-74d95c7fcf-hmbcl" Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.693262 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-585ff694b6-5fph4" event={"ID":"b8391139-71cc-48bb-af31-242cebaea8de","Type":"ContainerStarted","Data":"2e188441e14cf0fa1e0bd6092dd1eb7973ae0e209e949495bb0eda898d8aa529"} Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.693326 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-585ff694b6-5fph4" event={"ID":"b8391139-71cc-48bb-af31-242cebaea8de","Type":"ContainerStarted","Data":"fb1b8d01b0e1aba70e56b34b930e2ea40459ff91070c2955ad86363abd29d9f7"} Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.703693 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-h8tvt" podStartSLOduration=5.052050017 podStartE2EDuration="31.703648901s" podCreationTimestamp="2026-01-21 17:52:00 +0000 UTC" firstStartedPulling="2026-01-21 17:52:02.34000435 +0000 UTC m=+1148.966294373" lastFinishedPulling="2026-01-21 17:52:28.991603234 +0000 UTC m=+1175.617893257" observedRunningTime="2026-01-21 17:52:31.697632072 +0000 UTC m=+1178.323922095" watchObservedRunningTime="2026-01-21 17:52:31.703648901 +0000 UTC m=+1178.329938924" Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.730393 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-74m2t" podStartSLOduration=3.2054299840000002 podStartE2EDuration="1m4.730368698s" podCreationTimestamp="2026-01-21 17:51:27 +0000 UTC" firstStartedPulling="2026-01-21 17:51:28.893841849 +0000 UTC m=+1115.520131872" lastFinishedPulling="2026-01-21 17:52:30.418780553 +0000 UTC m=+1177.045070586" observedRunningTime="2026-01-21 17:52:31.722013274 +0000 UTC m=+1178.348303297" watchObservedRunningTime="2026-01-21 17:52:31.730368698 +0000 UTC m=+1178.356658711" Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.796088 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-db-sync-6zwqt" podStartSLOduration=2.982478643 podStartE2EDuration="1m0.796063056s" podCreationTimestamp="2026-01-21 17:51:31 +0000 UTC" firstStartedPulling="2026-01-21 17:51:32.631190887 +0000 UTC m=+1119.257480910" lastFinishedPulling="2026-01-21 17:52:30.4447753 +0000 UTC m=+1177.071065323" observedRunningTime="2026-01-21 17:52:31.762037464 +0000 UTC m=+1178.388327487" watchObservedRunningTime="2026-01-21 17:52:31.796063056 +0000 UTC m=+1178.422353079" Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.840147 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-74d95c7fcf-hmbcl"] Jan 21 17:52:31 crc kubenswrapper[4799]: I0121 17:52:31.851593 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-74d95c7fcf-hmbcl"] Jan 21 17:52:32 crc kubenswrapper[4799]: I0121 17:52:32.216947 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="534362cc-8f86-49f5-95af-2027f8f64b0a" path="/var/lib/kubelet/pods/534362cc-8f86-49f5-95af-2027f8f64b0a/volumes" Jan 21 17:52:32 crc kubenswrapper[4799]: I0121 17:52:32.217388 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fdeb1d8-5803-47be-8625-68aeaf6a8ef2" path="/var/lib/kubelet/pods/6fdeb1d8-5803-47be-8625-68aeaf6a8ef2/volumes" Jan 21 17:52:32 crc kubenswrapper[4799]: I0121 17:52:32.436306 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-648b6fc9cc-db49n" 
podUID="6df31e83-1f86-4266-be9f-7e8dbfd25922" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: i/o timeout" Jan 21 17:52:32 crc kubenswrapper[4799]: I0121 17:52:32.706712 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4b8ffb2a-2019-41d5-a0fb-c05199bcc230","Type":"ContainerStarted","Data":"8f23bc960d06b776a628754f163280218abe7c353c40a867d70e231f9308d1ec"} Jan 21 17:52:32 crc kubenswrapper[4799]: I0121 17:52:32.711519 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-585ff694b6-5fph4" event={"ID":"b8391139-71cc-48bb-af31-242cebaea8de","Type":"ContainerStarted","Data":"8a9580f2e73ec8f87f7368c9e16cc98d8df3c55aef624166ede21bf96c837e15"} Jan 21 17:52:32 crc kubenswrapper[4799]: I0121 17:52:32.713310 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d9c7df8bb-b2r9b" event={"ID":"fde84d23-f64f-4299-af94-1d29894acdc0","Type":"ContainerStarted","Data":"e331da72beab4e6eb2351cf8e7e1bf76b4b5b46cb290cf8f998552993f9545c4"} Jan 21 17:52:32 crc kubenswrapper[4799]: I0121 17:52:32.716889 4799 generic.go:334] "Generic (PLEG): container finished" podID="fba342a8-536f-4c59-bb2c-44984e0a7fe0" containerID="1cc27ab2ad3ef045292ec06cc1fa63b7684e9e05a018b5c43b21dafae099ee42" exitCode=0 Jan 21 17:52:32 crc kubenswrapper[4799]: I0121 17:52:32.716969 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kdwcc" event={"ID":"fba342a8-536f-4c59-bb2c-44984e0a7fe0","Type":"ContainerDied","Data":"1cc27ab2ad3ef045292ec06cc1fa63b7684e9e05a018b5c43b21dafae099ee42"} Jan 21 17:52:32 crc kubenswrapper[4799]: I0121 17:52:32.720030 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q4lsr" event={"ID":"4c14e317-6824-489e-9c4a-e7cf337c2439","Type":"ContainerStarted","Data":"02c152ea9b9ec58ea9d61e88f24f232abc6fd9c319333c447f47c7705ddfd950"} Jan 21 17:52:32 crc kubenswrapper[4799]: I0121 17:52:32.737832 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-585ff694b6-5fph4" podStartSLOduration=23.737808875 podStartE2EDuration="23.737808875s" podCreationTimestamp="2026-01-21 17:52:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:52:32.736094447 +0000 UTC m=+1179.362384680" watchObservedRunningTime="2026-01-21 17:52:32.737808875 +0000 UTC m=+1179.364098898" Jan 21 17:52:32 crc kubenswrapper[4799]: I0121 17:52:32.792352 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7d9c7df8bb-b2r9b" podStartSLOduration=23.700816981 podStartE2EDuration="23.79229962s" podCreationTimestamp="2026-01-21 17:52:09 +0000 UTC" firstStartedPulling="2026-01-21 17:52:30.942378088 +0000 UTC m=+1177.568668111" lastFinishedPulling="2026-01-21 17:52:31.033860727 +0000 UTC m=+1177.660150750" observedRunningTime="2026-01-21 17:52:32.779996275 +0000 UTC m=+1179.406286308" watchObservedRunningTime="2026-01-21 17:52:32.79229962 +0000 UTC m=+1179.418589663" Jan 21 17:52:32 crc kubenswrapper[4799]: I0121 17:52:32.814748 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-q4lsr" podStartSLOduration=2.814702306 podStartE2EDuration="2.814702306s" podCreationTimestamp="2026-01-21 17:52:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-21 17:52:32.805711935 +0000 UTC m=+1179.432002068" watchObservedRunningTime="2026-01-21 17:52:32.814702306 +0000 UTC m=+1179.440992329" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:34.092470 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kdwcc" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:34.139688 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5v5j\" (UniqueName: \"kubernetes.io/projected/fba342a8-536f-4c59-bb2c-44984e0a7fe0-kube-api-access-k5v5j\") pod \"fba342a8-536f-4c59-bb2c-44984e0a7fe0\" (UID: \"fba342a8-536f-4c59-bb2c-44984e0a7fe0\") " Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:34.139769 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fba342a8-536f-4c59-bb2c-44984e0a7fe0-combined-ca-bundle\") pod \"fba342a8-536f-4c59-bb2c-44984e0a7fe0\" (UID: \"fba342a8-536f-4c59-bb2c-44984e0a7fe0\") " Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:34.139814 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fba342a8-536f-4c59-bb2c-44984e0a7fe0-config\") pod \"fba342a8-536f-4c59-bb2c-44984e0a7fe0\" (UID: \"fba342a8-536f-4c59-bb2c-44984e0a7fe0\") " Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:34.152828 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fba342a8-536f-4c59-bb2c-44984e0a7fe0-kube-api-access-k5v5j" (OuterVolumeSpecName: "kube-api-access-k5v5j") pod "fba342a8-536f-4c59-bb2c-44984e0a7fe0" (UID: "fba342a8-536f-4c59-bb2c-44984e0a7fe0"). InnerVolumeSpecName "kube-api-access-k5v5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:34.169455 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fba342a8-536f-4c59-bb2c-44984e0a7fe0-config" (OuterVolumeSpecName: "config") pod "fba342a8-536f-4c59-bb2c-44984e0a7fe0" (UID: "fba342a8-536f-4c59-bb2c-44984e0a7fe0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:34.169945 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fba342a8-536f-4c59-bb2c-44984e0a7fe0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fba342a8-536f-4c59-bb2c-44984e0a7fe0" (UID: "fba342a8-536f-4c59-bb2c-44984e0a7fe0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:34.243811 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5v5j\" (UniqueName: \"kubernetes.io/projected/fba342a8-536f-4c59-bb2c-44984e0a7fe0-kube-api-access-k5v5j\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:34.243856 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fba342a8-536f-4c59-bb2c-44984e0a7fe0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:34.243869 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/fba342a8-536f-4c59-bb2c-44984e0a7fe0-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:34.741223 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kdwcc" event={"ID":"fba342a8-536f-4c59-bb2c-44984e0a7fe0","Type":"ContainerDied","Data":"cf96656d491643735efedcdba6d1259ab85b9e0ca667215ebb26d28b6aa3f623"} Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:34.741572 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf96656d491643735efedcdba6d1259ab85b9e0ca667215ebb26d28b6aa3f623" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:34.741428 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kdwcc" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.027313 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-568c9b8bff-xjtgp"] Jan 21 17:52:36 crc kubenswrapper[4799]: E0121 17:52:35.027758 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fba342a8-536f-4c59-bb2c-44984e0a7fe0" containerName="neutron-db-sync" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.027770 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fba342a8-536f-4c59-bb2c-44984e0a7fe0" containerName="neutron-db-sync" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.027939 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="fba342a8-536f-4c59-bb2c-44984e0a7fe0" containerName="neutron-db-sync" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.031392 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.064923 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-dns-swift-storage-0\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.064974 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-dns-svc\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.065035 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-config\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.065058 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bschv\" (UniqueName: \"kubernetes.io/projected/2495bb43-91f2-4311-9703-6b80621c59b8-kube-api-access-bschv\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.065142 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-ovsdbserver-sb\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.065169 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-ovsdbserver-nb\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.074268 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-568c9b8bff-xjtgp"] Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.159841 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7c9474f76d-ptsv9"] Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.161563 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7c9474f76d-ptsv9" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.168082 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.168098 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.168083 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.168394 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-dns-swift-storage-0\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.168407 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-8wxcv" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.168453 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-dns-svc\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.168514 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-config\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.168532 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bschv\" (UniqueName: \"kubernetes.io/projected/2495bb43-91f2-4311-9703-6b80621c59b8-kube-api-access-bschv\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.168592 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-ovsdbserver-sb\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.168612 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-ovsdbserver-nb\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.169355 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-dns-svc\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.170913 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-config\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.171025 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-dns-swift-storage-0\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.171640 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7c9474f76d-ptsv9"] Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.172694 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-ovsdbserver-sb\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.177388 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-ovsdbserver-nb\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.216029 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bschv\" (UniqueName: \"kubernetes.io/projected/2495bb43-91f2-4311-9703-6b80621c59b8-kube-api-access-bschv\") pod \"dnsmasq-dns-568c9b8bff-xjtgp\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") " pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.270483 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-httpd-config\") pod \"neutron-7c9474f76d-ptsv9\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " pod="openstack/neutron-7c9474f76d-ptsv9" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.270837 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-combined-ca-bundle\") pod \"neutron-7c9474f76d-ptsv9\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " pod="openstack/neutron-7c9474f76d-ptsv9" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.270984 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-ovndb-tls-certs\") pod \"neutron-7c9474f76d-ptsv9\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " pod="openstack/neutron-7c9474f76d-ptsv9" Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.271479 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-config\") pod \"neutron-7c9474f76d-ptsv9\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " pod="openstack/neutron-7c9474f76d-ptsv9" Jan 21 17:52:36 crc 
Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.272785 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xj7tf\" (UniqueName: \"kubernetes.io/projected/57391f37-88fc-4dca-9afd-159d78c47ca1-kube-api-access-xj7tf\") pod \"neutron-7c9474f76d-ptsv9\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " pod="openstack/neutron-7c9474f76d-ptsv9"
Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.363954 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp"
Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.381482 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-httpd-config\") pod \"neutron-7c9474f76d-ptsv9\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " pod="openstack/neutron-7c9474f76d-ptsv9"
Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.381696 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-combined-ca-bundle\") pod \"neutron-7c9474f76d-ptsv9\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " pod="openstack/neutron-7c9474f76d-ptsv9"
Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.381786 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-ovndb-tls-certs\") pod \"neutron-7c9474f76d-ptsv9\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " pod="openstack/neutron-7c9474f76d-ptsv9"
Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.381971 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-config\") pod \"neutron-7c9474f76d-ptsv9\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " pod="openstack/neutron-7c9474f76d-ptsv9"
Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.382075 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xj7tf\" (UniqueName: \"kubernetes.io/projected/57391f37-88fc-4dca-9afd-159d78c47ca1-kube-api-access-xj7tf\") pod \"neutron-7c9474f76d-ptsv9\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " pod="openstack/neutron-7c9474f76d-ptsv9"
Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.387176 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-httpd-config\") pod \"neutron-7c9474f76d-ptsv9\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " pod="openstack/neutron-7c9474f76d-ptsv9"
Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.387521 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-combined-ca-bundle\") pod \"neutron-7c9474f76d-ptsv9\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " pod="openstack/neutron-7c9474f76d-ptsv9"
Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.387921 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-ovndb-tls-certs\") pod \"neutron-7c9474f76d-ptsv9\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " pod="openstack/neutron-7c9474f76d-ptsv9"
Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.393239 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-config\") pod \"neutron-7c9474f76d-ptsv9\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " pod="openstack/neutron-7c9474f76d-ptsv9"
Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.412651 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xj7tf\" (UniqueName: \"kubernetes.io/projected/57391f37-88fc-4dca-9afd-159d78c47ca1-kube-api-access-xj7tf\") pod \"neutron-7c9474f76d-ptsv9\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " pod="openstack/neutron-7c9474f76d-ptsv9"
Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:35.486753 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7c9474f76d-ptsv9"
Jan 21 17:52:36 crc kubenswrapper[4799]: I0121 17:52:36.806731 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-568c9b8bff-xjtgp"]
Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.039401 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7c9474f76d-ptsv9"]
Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.517049 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7644966657-gcssj"]
Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.519290 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7644966657-gcssj"
Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.521579 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc"
Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.521930 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc"
Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.530286 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7644966657-gcssj"]
Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.672563 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-public-tls-certs\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj"
Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.672619 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-internal-tls-certs\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj"
Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.672645 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-config\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj"
Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.672992 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdxxz\" (UniqueName: \"kubernetes.io/projected/95553e27-38f3-4a0d-a382-d87410ca7ec3-kube-api-access-pdxxz\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj"
\"kubernetes.io/projected/95553e27-38f3-4a0d-a382-d87410ca7ec3-kube-api-access-pdxxz\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.673050 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-combined-ca-bundle\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.673170 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-httpd-config\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.673286 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-ovndb-tls-certs\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.775543 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-ovndb-tls-certs\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.775649 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-public-tls-certs\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.775666 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-internal-tls-certs\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.775686 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-config\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.775761 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdxxz\" (UniqueName: \"kubernetes.io/projected/95553e27-38f3-4a0d-a382-d87410ca7ec3-kube-api-access-pdxxz\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.775779 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-combined-ca-bundle\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.775810 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-httpd-config\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.783782 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-internal-tls-certs\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.784637 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-httpd-config\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.791729 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-config\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.792561 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-public-tls-certs\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.793602 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-ovndb-tls-certs\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.794135 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95553e27-38f3-4a0d-a382-d87410ca7ec3-combined-ca-bundle\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.798089 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdxxz\" (UniqueName: \"kubernetes.io/projected/95553e27-38f3-4a0d-a382-d87410ca7ec3-kube-api-access-pdxxz\") pod \"neutron-7644966657-gcssj\" (UID: \"95553e27-38f3-4a0d-a382-d87410ca7ec3\") " pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:37 crc kubenswrapper[4799]: I0121 17:52:37.878223 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:38 crc kubenswrapper[4799]: I0121 17:52:38.789040 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" event={"ID":"2495bb43-91f2-4311-9703-6b80621c59b8","Type":"ContainerStarted","Data":"1249d3154a473d5df91664a62ba0b6c23067c1a12ad19262bfe180c4293f22d4"} Jan 21 17:52:38 crc kubenswrapper[4799]: I0121 17:52:38.793985 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c9474f76d-ptsv9" event={"ID":"57391f37-88fc-4dca-9afd-159d78c47ca1","Type":"ContainerStarted","Data":"37fc0a1cb9cce7d310bfe07dd8eb0fd5efca1c4d94728b7e3867134d52c69a88"} Jan 21 17:52:39 crc kubenswrapper[4799]: I0121 17:52:39.806580 4799 generic.go:334] "Generic (PLEG): container finished" podID="9e9e3984-f73c-4a6d-8d5d-107481439374" containerID="22e51f92a929b9cd5d7e5262c57ad52cae081aac91b8a43a7ac2bf2cd078314c" exitCode=0 Jan 21 17:52:39 crc kubenswrapper[4799]: I0121 17:52:39.806760 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-h8tvt" event={"ID":"9e9e3984-f73c-4a6d-8d5d-107481439374","Type":"ContainerDied","Data":"22e51f92a929b9cd5d7e5262c57ad52cae081aac91b8a43a7ac2bf2cd078314c"} Jan 21 17:52:39 crc kubenswrapper[4799]: I0121 17:52:39.809628 4799 generic.go:334] "Generic (PLEG): container finished" podID="4c14e317-6824-489e-9c4a-e7cf337c2439" containerID="02c152ea9b9ec58ea9d61e88f24f232abc6fd9c319333c447f47c7705ddfd950" exitCode=0 Jan 21 17:52:39 crc kubenswrapper[4799]: I0121 17:52:39.809696 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q4lsr" event={"ID":"4c14e317-6824-489e-9c4a-e7cf337c2439","Type":"ContainerDied","Data":"02c152ea9b9ec58ea9d61e88f24f232abc6fd9c319333c447f47c7705ddfd950"} Jan 21 17:52:40 crc kubenswrapper[4799]: I0121 17:52:40.114498 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:40 crc kubenswrapper[4799]: I0121 17:52:40.114598 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:40 crc kubenswrapper[4799]: I0121 17:52:40.232481 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:40 crc kubenswrapper[4799]: I0121 17:52:40.232534 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:40 crc kubenswrapper[4799]: I0121 17:52:40.825937 4799 generic.go:334] "Generic (PLEG): container finished" podID="27ad5c23-b3d3-41a1-a4ae-4821eb3524fd" containerID="e3da8b23a9adbcf3871c602e691f4eeca53327e4701a0d57d505521c4ed93547" exitCode=0 Jan 21 17:52:40 crc kubenswrapper[4799]: I0121 17:52:40.828064 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-6zwqt" event={"ID":"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd","Type":"ContainerDied","Data":"e3da8b23a9adbcf3871c602e691f4eeca53327e4701a0d57d505521c4ed93547"} Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.031819 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7644966657-gcssj"] Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.391762 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-q4lsr" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.491504 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-combined-ca-bundle\") pod \"4c14e317-6824-489e-9c4a-e7cf337c2439\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.492557 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-config-data\") pod \"4c14e317-6824-489e-9c4a-e7cf337c2439\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.493597 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-scripts\") pod \"4c14e317-6824-489e-9c4a-e7cf337c2439\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.493646 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-credential-keys\") pod \"4c14e317-6824-489e-9c4a-e7cf337c2439\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.493714 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-fernet-keys\") pod \"4c14e317-6824-489e-9c4a-e7cf337c2439\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.493742 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfjw9\" (UniqueName: \"kubernetes.io/projected/4c14e317-6824-489e-9c4a-e7cf337c2439-kube-api-access-rfjw9\") pod \"4c14e317-6824-489e-9c4a-e7cf337c2439\" (UID: \"4c14e317-6824-489e-9c4a-e7cf337c2439\") " Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.499030 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c14e317-6824-489e-9c4a-e7cf337c2439-kube-api-access-rfjw9" (OuterVolumeSpecName: "kube-api-access-rfjw9") pod "4c14e317-6824-489e-9c4a-e7cf337c2439" (UID: "4c14e317-6824-489e-9c4a-e7cf337c2439"). InnerVolumeSpecName "kube-api-access-rfjw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.499661 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-scripts" (OuterVolumeSpecName: "scripts") pod "4c14e317-6824-489e-9c4a-e7cf337c2439" (UID: "4c14e317-6824-489e-9c4a-e7cf337c2439"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.501686 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "4c14e317-6824-489e-9c4a-e7cf337c2439" (UID: "4c14e317-6824-489e-9c4a-e7cf337c2439"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.501949 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4c14e317-6824-489e-9c4a-e7cf337c2439" (UID: "4c14e317-6824-489e-9c4a-e7cf337c2439"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.523612 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c14e317-6824-489e-9c4a-e7cf337c2439" (UID: "4c14e317-6824-489e-9c4a-e7cf337c2439"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.534361 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-config-data" (OuterVolumeSpecName: "config-data") pod "4c14e317-6824-489e-9c4a-e7cf337c2439" (UID: "4c14e317-6824-489e-9c4a-e7cf337c2439"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.597229 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.597271 4799 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.597284 4799 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.597297 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfjw9\" (UniqueName: \"kubernetes.io/projected/4c14e317-6824-489e-9c4a-e7cf337c2439-kube-api-access-rfjw9\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.597309 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.597320 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c14e317-6824-489e-9c4a-e7cf337c2439-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.692694 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-h8tvt" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.698146 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqmch\" (UniqueName: \"kubernetes.io/projected/9e9e3984-f73c-4a6d-8d5d-107481439374-kube-api-access-vqmch\") pod \"9e9e3984-f73c-4a6d-8d5d-107481439374\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.698219 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-config-data\") pod \"9e9e3984-f73c-4a6d-8d5d-107481439374\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.704935 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e9e3984-f73c-4a6d-8d5d-107481439374-kube-api-access-vqmch" (OuterVolumeSpecName: "kube-api-access-vqmch") pod "9e9e3984-f73c-4a6d-8d5d-107481439374" (UID: "9e9e3984-f73c-4a6d-8d5d-107481439374"). InnerVolumeSpecName "kube-api-access-vqmch". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.733296 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-config-data" (OuterVolumeSpecName: "config-data") pod "9e9e3984-f73c-4a6d-8d5d-107481439374" (UID: "9e9e3984-f73c-4a6d-8d5d-107481439374"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.800695 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-scripts\") pod \"9e9e3984-f73c-4a6d-8d5d-107481439374\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.800762 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e9e3984-f73c-4a6d-8d5d-107481439374-logs\") pod \"9e9e3984-f73c-4a6d-8d5d-107481439374\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.800812 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-combined-ca-bundle\") pod \"9e9e3984-f73c-4a6d-8d5d-107481439374\" (UID: \"9e9e3984-f73c-4a6d-8d5d-107481439374\") " Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.801280 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqmch\" (UniqueName: \"kubernetes.io/projected/9e9e3984-f73c-4a6d-8d5d-107481439374-kube-api-access-vqmch\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.801304 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.801826 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e9e3984-f73c-4a6d-8d5d-107481439374-logs" (OuterVolumeSpecName: "logs") pod "9e9e3984-f73c-4a6d-8d5d-107481439374" (UID: 
"9e9e3984-f73c-4a6d-8d5d-107481439374"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.805013 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-scripts" (OuterVolumeSpecName: "scripts") pod "9e9e3984-f73c-4a6d-8d5d-107481439374" (UID: "9e9e3984-f73c-4a6d-8d5d-107481439374"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.828350 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e9e3984-f73c-4a6d-8d5d-107481439374" (UID: "9e9e3984-f73c-4a6d-8d5d-107481439374"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.841876 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-h8tvt" event={"ID":"9e9e3984-f73c-4a6d-8d5d-107481439374","Type":"ContainerDied","Data":"741a4528ebe59a5bd70b7d8b76d2a4eff7bee55a5e2f36bfd59f6d007e087bb6"} Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.842741 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="741a4528ebe59a5bd70b7d8b76d2a4eff7bee55a5e2f36bfd59f6d007e087bb6" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.842020 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-h8tvt" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.844676 4799 generic.go:334] "Generic (PLEG): container finished" podID="2495bb43-91f2-4311-9703-6b80621c59b8" containerID="d016b0a000036db1b5d678ac68d37a7c49d9138845b37d21149a3404c4f5cc83" exitCode=0 Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.844911 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" event={"ID":"2495bb43-91f2-4311-9703-6b80621c59b8","Type":"ContainerDied","Data":"d016b0a000036db1b5d678ac68d37a7c49d9138845b37d21149a3404c4f5cc83"} Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.860937 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c9474f76d-ptsv9" event={"ID":"57391f37-88fc-4dca-9afd-159d78c47ca1","Type":"ContainerStarted","Data":"e200b63c65b19acd0df9eee80040ffdddf00f005ff88c6d89d24eff56f448dbc"} Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.860987 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c9474f76d-ptsv9" event={"ID":"57391f37-88fc-4dca-9afd-159d78c47ca1","Type":"ContainerStarted","Data":"07d914c9327fc6d601567f76ce1987ea59b0494b1c050f06533a99a64c744755"} Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.861222 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7c9474f76d-ptsv9" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.867491 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7644966657-gcssj" event={"ID":"95553e27-38f3-4a0d-a382-d87410ca7ec3","Type":"ContainerStarted","Data":"510ee6eae5a3c8990aa325f0bc2472a95a24c67653f70b6b93bdfc9277429b5a"} Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.867549 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7644966657-gcssj" 
event={"ID":"95553e27-38f3-4a0d-a382-d87410ca7ec3","Type":"ContainerStarted","Data":"6c910b0547663497ad861515b1668822edb652f6620adab8217d3465962e227d"} Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.867565 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7644966657-gcssj" event={"ID":"95553e27-38f3-4a0d-a382-d87410ca7ec3","Type":"ContainerStarted","Data":"5552400d369da87efcaee518e21d4d9ea2b6171f23abc98303df7a2e96d2dde8"} Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.867795 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7644966657-gcssj" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.877381 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q4lsr" event={"ID":"4c14e317-6824-489e-9c4a-e7cf337c2439","Type":"ContainerDied","Data":"0ea1c293e682d703f31763559772656da1738dcda7f04787cf387621d889d2a7"} Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.877430 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ea1c293e682d703f31763559772656da1738dcda7f04787cf387621d889d2a7" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.877494 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-q4lsr" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.889004 4799 generic.go:334] "Generic (PLEG): container finished" podID="d7930da5-46c0-4cc3-a63a-316aff9f5b3a" containerID="45f86041843fab27b22ec70bb3c93e872b5440d034d491e2aab2734943be1c66" exitCode=0 Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.889222 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-vkhcg" event={"ID":"d7930da5-46c0-4cc3-a63a-316aff9f5b3a","Type":"ContainerDied","Data":"45f86041843fab27b22ec70bb3c93e872b5440d034d491e2aab2734943be1c66"} Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.898049 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4b8ffb2a-2019-41d5-a0fb-c05199bcc230","Type":"ContainerStarted","Data":"5db2fe2be2fee23676dafc35c5104e8f1d5377f39bc33ba233590f44fc108599"} Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.903448 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.903479 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e9e3984-f73c-4a6d-8d5d-107481439374-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:41 crc kubenswrapper[4799]: I0121 17:52:41.903490 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e9e3984-f73c-4a6d-8d5d-107481439374-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:41.972595 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7c9474f76d-ptsv9" podStartSLOduration=6.972568131 podStartE2EDuration="6.972568131s" podCreationTimestamp="2026-01-21 17:52:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:52:41.935415101 +0000 UTC m=+1188.561705134" watchObservedRunningTime="2026-01-21 17:52:41.972568131 +0000 UTC m=+1188.598858154" Jan 21 
17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.009475 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7644966657-gcssj" podStartSLOduration=5.009442932 podStartE2EDuration="5.009442932s" podCreationTimestamp="2026-01-21 17:52:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:52:42.006084928 +0000 UTC m=+1188.632374951" watchObservedRunningTime="2026-01-21 17:52:42.009442932 +0000 UTC m=+1188.635732955" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.097212 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-55f8df6d54-cffcw"] Jan 21 17:52:42 crc kubenswrapper[4799]: E0121 17:52:42.097774 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c14e317-6824-489e-9c4a-e7cf337c2439" containerName="keystone-bootstrap" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.097790 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c14e317-6824-489e-9c4a-e7cf337c2439" containerName="keystone-bootstrap" Jan 21 17:52:42 crc kubenswrapper[4799]: E0121 17:52:42.097801 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e9e3984-f73c-4a6d-8d5d-107481439374" containerName="placement-db-sync" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.097807 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e9e3984-f73c-4a6d-8d5d-107481439374" containerName="placement-db-sync" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.098036 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e9e3984-f73c-4a6d-8d5d-107481439374" containerName="placement-db-sync" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.098059 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c14e317-6824-489e-9c4a-e7cf337c2439" containerName="keystone-bootstrap" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.099228 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.110096 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-config-data\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.110174 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-internal-tls-certs\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.110201 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-55f8df6d54-cffcw"] Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.110235 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-logs\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.110263 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlxwd\" (UniqueName: \"kubernetes.io/projected/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-kube-api-access-wlxwd\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.110291 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-public-tls-certs\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.110475 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-scripts\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.110559 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-combined-ca-bundle\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.111394 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.111887 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-ggtzr" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.112063 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Jan 
21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.112288 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.112489 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.128252 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-86999674c5-gpgq6"] Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.129727 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.140624 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-86999674c5-gpgq6"] Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.147727 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.147889 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.148319 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-phg79" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.148464 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.148637 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.148742 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.214046 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-config-data\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.214125 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-internal-tls-certs\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.214196 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-scripts\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.214266 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-logs\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.214296 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlxwd\" (UniqueName: 
\"kubernetes.io/projected/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-kube-api-access-wlxwd\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.214335 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-public-tls-certs\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.214382 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-config-data\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.214408 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-scripts\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.214433 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-credential-keys\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.214481 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-internal-tls-certs\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.214516 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-combined-ca-bundle\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.214579 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-combined-ca-bundle\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.214626 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-public-tls-certs\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.214660 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" 
(UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-fernet-keys\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.214696 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5nkk\" (UniqueName: \"kubernetes.io/projected/4cc3ff02-feee-4b55-a057-99380b99a10e-kube-api-access-b5nkk\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.220596 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-logs\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.229664 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-internal-tls-certs\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.229951 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-public-tls-certs\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.237898 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-combined-ca-bundle\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.239074 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-scripts\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.239693 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-config-data\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.253115 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlxwd\" (UniqueName: \"kubernetes.io/projected/d03c23b6-50c8-4a4b-b2ea-53c4a3010790-kube-api-access-wlxwd\") pod \"placement-55f8df6d54-cffcw\" (UID: \"d03c23b6-50c8-4a4b-b2ea-53c4a3010790\") " pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.317285 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-credential-keys\") pod \"keystone-86999674c5-gpgq6\" (UID: 
\"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.317366 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-internal-tls-certs\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.317453 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-combined-ca-bundle\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.317482 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-public-tls-certs\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.317520 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-fernet-keys\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.317551 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5nkk\" (UniqueName: \"kubernetes.io/projected/4cc3ff02-feee-4b55-a057-99380b99a10e-kube-api-access-b5nkk\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.317682 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-scripts\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.318016 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-config-data\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.331814 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-credential-keys\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.336169 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-combined-ca-bundle\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: 
I0121 17:52:42.341436 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-public-tls-certs\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.341675 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-internal-tls-certs\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.341700 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-fernet-keys\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.342427 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-config-data\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.347644 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cc3ff02-feee-4b55-a057-99380b99a10e-scripts\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.357249 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5nkk\" (UniqueName: \"kubernetes.io/projected/4cc3ff02-feee-4b55-a057-99380b99a10e-kube-api-access-b5nkk\") pod \"keystone-86999674c5-gpgq6\" (UID: \"4cc3ff02-feee-4b55-a057-99380b99a10e\") " pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.548756 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.584753 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.796622 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-6zwqt" Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.942431 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-config-data\") pod \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\" (UID: \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\") " Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.942689 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-combined-ca-bundle\") pod \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\" (UID: \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\") " Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.942941 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-db-sync-config-data\") pod \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\" (UID: \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\") " Jan 21 17:52:42 crc kubenswrapper[4799]: I0121 17:52:42.943003 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pr8b\" (UniqueName: \"kubernetes.io/projected/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-kube-api-access-9pr8b\") pod \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\" (UID: \"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd\") " Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.015145 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" event={"ID":"2495bb43-91f2-4311-9703-6b80621c59b8","Type":"ContainerStarted","Data":"2f82bec28f3674b09d7b88205e36bf6634e9492c1bd9b3fa9b2e6a187f4e0dfe"} Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.015541 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.047043 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-kube-api-access-9pr8b" (OuterVolumeSpecName: "kube-api-access-9pr8b") pod "27ad5c23-b3d3-41a1-a4ae-4821eb3524fd" (UID: "27ad5c23-b3d3-41a1-a4ae-4821eb3524fd"). InnerVolumeSpecName "kube-api-access-9pr8b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.059332 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-6zwqt" Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.059412 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-6zwqt" event={"ID":"27ad5c23-b3d3-41a1-a4ae-4821eb3524fd","Type":"ContainerDied","Data":"284e0f9598bf17a2ff5c8eabb7d3b8a6f95b84df2c8b707060dfb399b1234857"} Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.059454 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="284e0f9598bf17a2ff5c8eabb7d3b8a6f95b84df2c8b707060dfb399b1234857" Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.070308 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "27ad5c23-b3d3-41a1-a4ae-4821eb3524fd" (UID: "27ad5c23-b3d3-41a1-a4ae-4821eb3524fd"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.076404 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "27ad5c23-b3d3-41a1-a4ae-4821eb3524fd" (UID: "27ad5c23-b3d3-41a1-a4ae-4821eb3524fd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.081822 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" podStartSLOduration=9.081769544 podStartE2EDuration="9.081769544s" podCreationTimestamp="2026-01-21 17:52:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:52:43.04799603 +0000 UTC m=+1189.674286053" watchObservedRunningTime="2026-01-21 17:52:43.081769544 +0000 UTC m=+1189.708059587" Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.090577 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.090624 4799 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.090641 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pr8b\" (UniqueName: \"kubernetes.io/projected/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-kube-api-access-9pr8b\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.173283 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-config-data" (OuterVolumeSpecName: "config-data") pod "27ad5c23-b3d3-41a1-a4ae-4821eb3524fd" (UID: "27ad5c23-b3d3-41a1-a4ae-4821eb3524fd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.224665 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.354168 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Jan 21 17:52:43 crc kubenswrapper[4799]: E0121 17:52:43.354741 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27ad5c23-b3d3-41a1-a4ae-4821eb3524fd" containerName="watcher-db-sync" Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.354762 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="27ad5c23-b3d3-41a1-a4ae-4821eb3524fd" containerName="watcher-db-sync" Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.354979 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="27ad5c23-b3d3-41a1-a4ae-4821eb3524fd" containerName="watcher-db-sync" Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.356744 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.388940 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.404983 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"]
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.406962 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.434087 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.441210 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"]
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.487886 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"]
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.513216 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-applier-0"]
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.514785 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.539702 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.540207 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgszb\" (UniqueName: \"kubernetes.io/projected/e3555046-24d9-4700-bdb8-0a09c35f651a-kube-api-access-vgszb\") pod \"watcher-decision-engine-0\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.540238 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf49l\" (UniqueName: \"kubernetes.io/projected/68d6d069-6c63-4dab-8664-e9474f3615bb-kube-api-access-zf49l\") pod \"watcher-api-0\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.540262 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.540290 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-config-data\") pod \"watcher-decision-engine-0\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.540324 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-config-data\") pod \"watcher-api-0\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.540374 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.540421 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3555046-24d9-4700-bdb8-0a09c35f651a-logs\") pod \"watcher-decision-engine-0\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.540447 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68d6d069-6c63-4dab-8664-e9474f3615bb-logs\") pod \"watcher-api-0\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.540516 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.541046 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-applier-config-data"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.580439 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"]
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.614188 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-55f8df6d54-cffcw"]
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.641931 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.642003 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.642044 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgszb\" (UniqueName: \"kubernetes.io/projected/e3555046-24d9-4700-bdb8-0a09c35f651a-kube-api-access-vgszb\") pod \"watcher-decision-engine-0\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.642066 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf49l\" (UniqueName: \"kubernetes.io/projected/68d6d069-6c63-4dab-8664-e9474f3615bb-kube-api-access-zf49l\") pod \"watcher-api-0\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.642092 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.642117 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-config-data\") pod \"watcher-decision-engine-0\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.642153 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-config-data\") pod \"watcher-api-0\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.642200 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64p86\" (UniqueName: \"kubernetes.io/projected/c37f7c3a-832c-4991-9fe0-6e923befb599-kube-api-access-64p86\") pod \"watcher-applier-0\" (UID: \"c37f7c3a-832c-4991-9fe0-6e923befb599\") " pod="openstack/watcher-applier-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.642239 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.642279 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3555046-24d9-4700-bdb8-0a09c35f651a-logs\") pod \"watcher-decision-engine-0\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.642295 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c37f7c3a-832c-4991-9fe0-6e923befb599-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"c37f7c3a-832c-4991-9fe0-6e923befb599\") " pod="openstack/watcher-applier-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.642312 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c37f7c3a-832c-4991-9fe0-6e923befb599-logs\") pod \"watcher-applier-0\" (UID: \"c37f7c3a-832c-4991-9fe0-6e923befb599\") " pod="openstack/watcher-applier-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.642336 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68d6d069-6c63-4dab-8664-e9474f3615bb-logs\") pod \"watcher-api-0\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.642360 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c37f7c3a-832c-4991-9fe0-6e923befb599-config-data\") pod \"watcher-applier-0\" (UID: \"c37f7c3a-832c-4991-9fe0-6e923befb599\") " pod="openstack/watcher-applier-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.646825 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68d6d069-6c63-4dab-8664-e9474f3615bb-logs\") pod \"watcher-api-0\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.647204 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3555046-24d9-4700-bdb8-0a09c35f651a-logs\") pod \"watcher-decision-engine-0\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.668148 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-config-data\") pod \"watcher-api-0\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.668357 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.690193 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.690253 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgszb\" (UniqueName: \"kubernetes.io/projected/e3555046-24d9-4700-bdb8-0a09c35f651a-kube-api-access-vgszb\") pod \"watcher-decision-engine-0\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.694503 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.699565 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-config-data\") pod \"watcher-decision-engine-0\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.700468 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf49l\" (UniqueName: \"kubernetes.io/projected/68d6d069-6c63-4dab-8664-e9474f3615bb-kube-api-access-zf49l\") pod \"watcher-api-0\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.700727 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.709063 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.745338 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64p86\" (UniqueName: \"kubernetes.io/projected/c37f7c3a-832c-4991-9fe0-6e923befb599-kube-api-access-64p86\") pod \"watcher-applier-0\" (UID: \"c37f7c3a-832c-4991-9fe0-6e923befb599\") " pod="openstack/watcher-applier-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.745401 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c37f7c3a-832c-4991-9fe0-6e923befb599-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"c37f7c3a-832c-4991-9fe0-6e923befb599\") " pod="openstack/watcher-applier-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.745427 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c37f7c3a-832c-4991-9fe0-6e923befb599-logs\") pod \"watcher-applier-0\" (UID: \"c37f7c3a-832c-4991-9fe0-6e923befb599\") " pod="openstack/watcher-applier-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.745461 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c37f7c3a-832c-4991-9fe0-6e923befb599-config-data\") pod \"watcher-applier-0\" (UID: \"c37f7c3a-832c-4991-9fe0-6e923befb599\") " pod="openstack/watcher-applier-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.747549 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c37f7c3a-832c-4991-9fe0-6e923befb599-logs\") pod \"watcher-applier-0\" (UID: \"c37f7c3a-832c-4991-9fe0-6e923befb599\") " pod="openstack/watcher-applier-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.749545 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c37f7c3a-832c-4991-9fe0-6e923befb599-config-data\") pod \"watcher-applier-0\" (UID: \"c37f7c3a-832c-4991-9fe0-6e923befb599\") " pod="openstack/watcher-applier-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.751469 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c37f7c3a-832c-4991-9fe0-6e923befb599-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"c37f7c3a-832c-4991-9fe0-6e923befb599\") " pod="openstack/watcher-applier-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.818253 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.854529 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64p86\" (UniqueName: \"kubernetes.io/projected/c37f7c3a-832c-4991-9fe0-6e923befb599-kube-api-access-64p86\") pod \"watcher-applier-0\" (UID: \"c37f7c3a-832c-4991-9fe0-6e923befb599\") " pod="openstack/watcher-applier-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.900826 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0"
Jan 21 17:52:43 crc kubenswrapper[4799]: I0121 17:52:43.935291 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-vkhcg"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.051376 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-combined-ca-bundle\") pod \"d7930da5-46c0-4cc3-a63a-316aff9f5b3a\" (UID: \"d7930da5-46c0-4cc3-a63a-316aff9f5b3a\") "
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.051582 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-db-sync-config-data\") pod \"d7930da5-46c0-4cc3-a63a-316aff9f5b3a\" (UID: \"d7930da5-46c0-4cc3-a63a-316aff9f5b3a\") "
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.051739 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qm6w7\" (UniqueName: \"kubernetes.io/projected/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-kube-api-access-qm6w7\") pod \"d7930da5-46c0-4cc3-a63a-316aff9f5b3a\" (UID: \"d7930da5-46c0-4cc3-a63a-316aff9f5b3a\") "
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.069975 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-kube-api-access-qm6w7" (OuterVolumeSpecName: "kube-api-access-qm6w7") pod "d7930da5-46c0-4cc3-a63a-316aff9f5b3a" (UID: "d7930da5-46c0-4cc3-a63a-316aff9f5b3a"). InnerVolumeSpecName "kube-api-access-qm6w7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.070091 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "d7930da5-46c0-4cc3-a63a-316aff9f5b3a" (UID: "d7930da5-46c0-4cc3-a63a-316aff9f5b3a"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.094294 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55f8df6d54-cffcw" event={"ID":"d03c23b6-50c8-4a4b-b2ea-53c4a3010790","Type":"ContainerStarted","Data":"cf69bca6987ee8d625ee4140df0d0ef75f2a7998f422d6d910423774eaa53a54"}
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.104399 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-vkhcg"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.104554 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-vkhcg" event={"ID":"d7930da5-46c0-4cc3-a63a-316aff9f5b3a","Type":"ContainerDied","Data":"9c56305d5af43e8d9a3b4e61477139a3e810e8bc3bbce08275eed291aae83545"}
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.104577 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c56305d5af43e8d9a3b4e61477139a3e810e8bc3bbce08275eed291aae83545"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.114371 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-86999674c5-gpgq6"]
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.154379 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qm6w7\" (UniqueName: \"kubernetes.io/projected/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-kube-api-access-qm6w7\") on node \"crc\" DevicePath \"\""
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.154408 4799 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.167429 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7930da5-46c0-4cc3-a63a-316aff9f5b3a" (UID: "d7930da5-46c0-4cc3-a63a-316aff9f5b3a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.265110 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7930da5-46c0-4cc3-a63a-316aff9f5b3a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 21 17:52:44 crc kubenswrapper[4799]: W0121 17:52:44.313686 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4cc3ff02_feee_4b55_a057_99380b99a10e.slice/crio-051112d26868993e02de3c0501a43a3561a1e4528d06f3f2c8f7d5722850428d WatchSource:0}: Error finding container 051112d26868993e02de3c0501a43a3561a1e4528d06f3f2c8f7d5722850428d: Status 404 returned error can't find the container with id 051112d26868993e02de3c0501a43a3561a1e4528d06f3f2c8f7d5722850428d
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.316613 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-7f979ff5f7-qvdts"]
Jan 21 17:52:44 crc kubenswrapper[4799]: E0121 17:52:44.337228 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7930da5-46c0-4cc3-a63a-316aff9f5b3a" containerName="barbican-db-sync"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.337262 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7930da5-46c0-4cc3-a63a-316aff9f5b3a" containerName="barbican-db-sync"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.337525 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7930da5-46c0-4cc3-a63a-316aff9f5b3a" containerName="barbican-db-sync"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.338514 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7f979ff5f7-qvdts"]
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.338535 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"]
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.348083 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.348674 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.364361 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.364556 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.365636 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"]
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.420074 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"]
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.482618 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bb30a38-ea0d-4580-9a41-326f00b5c149-logs\") pod \"barbican-keystone-listener-6b9d59f6f8-vl6sn\" (UID: \"9bb30a38-ea0d-4580-9a41-326f00b5c149\") " pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.482696 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7dc5147-addd-46d9-b5b3-3f328c0a5a94-combined-ca-bundle\") pod \"barbican-worker-7f979ff5f7-qvdts\" (UID: \"c7dc5147-addd-46d9-b5b3-3f328c0a5a94\") " pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.482721 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9bb30a38-ea0d-4580-9a41-326f00b5c149-config-data-custom\") pod \"barbican-keystone-listener-6b9d59f6f8-vl6sn\" (UID: \"9bb30a38-ea0d-4580-9a41-326f00b5c149\") " pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.482755 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7dc5147-addd-46d9-b5b3-3f328c0a5a94-logs\") pod \"barbican-worker-7f979ff5f7-qvdts\" (UID: \"c7dc5147-addd-46d9-b5b3-3f328c0a5a94\") " pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.482807 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bb30a38-ea0d-4580-9a41-326f00b5c149-config-data\") pod \"barbican-keystone-listener-6b9d59f6f8-vl6sn\" (UID: \"9bb30a38-ea0d-4580-9a41-326f00b5c149\") " pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.482842 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c7dc5147-addd-46d9-b5b3-3f328c0a5a94-config-data-custom\") pod \"barbican-worker-7f979ff5f7-qvdts\" (UID: \"c7dc5147-addd-46d9-b5b3-3f328c0a5a94\") " pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.482883 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrx2k\" (UniqueName: \"kubernetes.io/projected/c7dc5147-addd-46d9-b5b3-3f328c0a5a94-kube-api-access-wrx2k\") pod \"barbican-worker-7f979ff5f7-qvdts\" (UID: \"c7dc5147-addd-46d9-b5b3-3f328c0a5a94\") " pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.482930 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7dc5147-addd-46d9-b5b3-3f328c0a5a94-config-data\") pod \"barbican-worker-7f979ff5f7-qvdts\" (UID: \"c7dc5147-addd-46d9-b5b3-3f328c0a5a94\") " pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.482959 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chm57\" (UniqueName: \"kubernetes.io/projected/9bb30a38-ea0d-4580-9a41-326f00b5c149-kube-api-access-chm57\") pod \"barbican-keystone-listener-6b9d59f6f8-vl6sn\" (UID: \"9bb30a38-ea0d-4580-9a41-326f00b5c149\") " pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.483025 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bb30a38-ea0d-4580-9a41-326f00b5c149-combined-ca-bundle\") pod \"barbican-keystone-listener-6b9d59f6f8-vl6sn\" (UID: \"9bb30a38-ea0d-4580-9a41-326f00b5c149\") " pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:44 crc kubenswrapper[4799]: W0121 17:52:44.512326 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68d6d069_6c63_4dab_8664_e9474f3615bb.slice/crio-49e9584ccaf705b302551ae75a1ffc855b8a214a8b43f49c629e31b2d76d2fa5 WatchSource:0}: Error finding container 49e9584ccaf705b302551ae75a1ffc855b8a214a8b43f49c629e31b2d76d2fa5: Status 404 returned error can't find the container with id 49e9584ccaf705b302551ae75a1ffc855b8a214a8b43f49c629e31b2d76d2fa5
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.552874 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-568c9b8bff-xjtgp"]
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.678651 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7dc5147-addd-46d9-b5b3-3f328c0a5a94-config-data\") pod \"barbican-worker-7f979ff5f7-qvdts\" (UID: \"c7dc5147-addd-46d9-b5b3-3f328c0a5a94\") " pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.678981 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chm57\" (UniqueName: \"kubernetes.io/projected/9bb30a38-ea0d-4580-9a41-326f00b5c149-kube-api-access-chm57\") pod \"barbican-keystone-listener-6b9d59f6f8-vl6sn\" (UID: \"9bb30a38-ea0d-4580-9a41-326f00b5c149\") " pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.679138 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bb30a38-ea0d-4580-9a41-326f00b5c149-combined-ca-bundle\") pod \"barbican-keystone-listener-6b9d59f6f8-vl6sn\" (UID: \"9bb30a38-ea0d-4580-9a41-326f00b5c149\") " pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.679563 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bb30a38-ea0d-4580-9a41-326f00b5c149-logs\") pod \"barbican-keystone-listener-6b9d59f6f8-vl6sn\" (UID: \"9bb30a38-ea0d-4580-9a41-326f00b5c149\") " pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.679656 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7dc5147-addd-46d9-b5b3-3f328c0a5a94-combined-ca-bundle\") pod \"barbican-worker-7f979ff5f7-qvdts\" (UID: \"c7dc5147-addd-46d9-b5b3-3f328c0a5a94\") " pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.679681 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9bb30a38-ea0d-4580-9a41-326f00b5c149-config-data-custom\") pod \"barbican-keystone-listener-6b9d59f6f8-vl6sn\" (UID: \"9bb30a38-ea0d-4580-9a41-326f00b5c149\") " pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.679724 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7dc5147-addd-46d9-b5b3-3f328c0a5a94-logs\") pod \"barbican-worker-7f979ff5f7-qvdts\" (UID: \"c7dc5147-addd-46d9-b5b3-3f328c0a5a94\") " pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.680066 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bb30a38-ea0d-4580-9a41-326f00b5c149-config-data\") pod \"barbican-keystone-listener-6b9d59f6f8-vl6sn\" (UID: \"9bb30a38-ea0d-4580-9a41-326f00b5c149\") " pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.680123 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c7dc5147-addd-46d9-b5b3-3f328c0a5a94-config-data-custom\") pod \"barbican-worker-7f979ff5f7-qvdts\" (UID: \"c7dc5147-addd-46d9-b5b3-3f328c0a5a94\") " pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.680329 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrx2k\" (UniqueName: \"kubernetes.io/projected/c7dc5147-addd-46d9-b5b3-3f328c0a5a94-kube-api-access-wrx2k\") pod \"barbican-worker-7f979ff5f7-qvdts\" (UID: \"c7dc5147-addd-46d9-b5b3-3f328c0a5a94\") " pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.709142 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bb30a38-ea0d-4580-9a41-326f00b5c149-combined-ca-bundle\") pod \"barbican-keystone-listener-6b9d59f6f8-vl6sn\" (UID: \"9bb30a38-ea0d-4580-9a41-326f00b5c149\") " pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.738272 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7dc5147-addd-46d9-b5b3-3f328c0a5a94-logs\") pod \"barbican-worker-7f979ff5f7-qvdts\" (UID: \"c7dc5147-addd-46d9-b5b3-3f328c0a5a94\") " pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.741405 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9bb30a38-ea0d-4580-9a41-326f00b5c149-config-data-custom\") pod \"barbican-keystone-listener-6b9d59f6f8-vl6sn\" (UID: \"9bb30a38-ea0d-4580-9a41-326f00b5c149\") " pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.741500 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bb30a38-ea0d-4580-9a41-326f00b5c149-logs\") pod \"barbican-keystone-listener-6b9d59f6f8-vl6sn\" (UID: \"9bb30a38-ea0d-4580-9a41-326f00b5c149\") " pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.781106 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bb30a38-ea0d-4580-9a41-326f00b5c149-config-data\") pod \"barbican-keystone-listener-6b9d59f6f8-vl6sn\" (UID: \"9bb30a38-ea0d-4580-9a41-326f00b5c149\") " pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.782342 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7dc5147-addd-46d9-b5b3-3f328c0a5a94-config-data\") pod \"barbican-worker-7f979ff5f7-qvdts\" (UID: \"c7dc5147-addd-46d9-b5b3-3f328c0a5a94\") " pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.785191 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c7dc5147-addd-46d9-b5b3-3f328c0a5a94-config-data-custom\") pod \"barbican-worker-7f979ff5f7-qvdts\" (UID: \"c7dc5147-addd-46d9-b5b3-3f328c0a5a94\") " pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.808350 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrx2k\" (UniqueName: \"kubernetes.io/projected/c7dc5147-addd-46d9-b5b3-3f328c0a5a94-kube-api-access-wrx2k\") pod \"barbican-worker-7f979ff5f7-qvdts\" (UID: \"c7dc5147-addd-46d9-b5b3-3f328c0a5a94\") " pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:44 crc kubenswrapper[4799]: I0121 17:52:44.812593 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chm57\" (UniqueName: \"kubernetes.io/projected/9bb30a38-ea0d-4580-9a41-326f00b5c149-kube-api-access-chm57\") pod \"barbican-keystone-listener-6b9d59f6f8-vl6sn\" (UID: \"9bb30a38-ea0d-4580-9a41-326f00b5c149\") " pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.019316 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7dc5147-addd-46d9-b5b3-3f328c0a5a94-combined-ca-bundle\") pod \"barbican-worker-7f979ff5f7-qvdts\" (UID: \"c7dc5147-addd-46d9-b5b3-3f328c0a5a94\") " pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.083334 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.089989 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7f979ff5f7-qvdts"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.261730 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-9d5d45775-xwjff"]
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.268586 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.276243 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9d5d45775-xwjff"]
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.321867 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5dcb766fd4-g29lg"]
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.332634 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.338020 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.349248 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-86999674c5-gpgq6" event={"ID":"4cc3ff02-feee-4b55-a057-99380b99a10e","Type":"ContainerStarted","Data":"051112d26868993e02de3c0501a43a3561a1e4528d06f3f2c8f7d5722850428d"}
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.379747 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-combined-ca-bundle\") pod \"barbican-api-5dcb766fd4-g29lg\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.379853 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa02fc34-2263-47cb-90cf-7baedb10be5e-logs\") pod \"barbican-api-5dcb766fd4-g29lg\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.379957 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-ovsdbserver-nb\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.380043 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-config-data-custom\") pod \"barbican-api-5dcb766fd4-g29lg\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.380086 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-config\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.380180 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-ovsdbserver-sb\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.380206 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-dns-svc\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.380280 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkmx7\" (UniqueName: \"kubernetes.io/projected/aa02fc34-2263-47cb-90cf-7baedb10be5e-kube-api-access-lkmx7\") pod \"barbican-api-5dcb766fd4-g29lg\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.380317 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc7nk\" (UniqueName: \"kubernetes.io/projected/af0f2741-8c09-4555-b8c4-251b9a2de57e-kube-api-access-fc7nk\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.380364 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-dns-swift-storage-0\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.380436 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-config-data\") pod \"barbican-api-5dcb766fd4-g29lg\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.381219 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"68d6d069-6c63-4dab-8664-e9474f3615bb","Type":"ContainerStarted","Data":"49e9584ccaf705b302551ae75a1ffc855b8a214a8b43f49c629e31b2d76d2fa5"}
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.384217 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5dcb766fd4-g29lg"]
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.384682 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" podUID="2495bb43-91f2-4311-9703-6b80621c59b8" containerName="dnsmasq-dns" containerID="cri-o://2f82bec28f3674b09d7b88205e36bf6634e9492c1bd9b3fa9b2e6a187f4e0dfe" gracePeriod=10
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.486071 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkmx7\" (UniqueName: \"kubernetes.io/projected/aa02fc34-2263-47cb-90cf-7baedb10be5e-kube-api-access-lkmx7\") pod \"barbican-api-5dcb766fd4-g29lg\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.486115 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc7nk\" (UniqueName: \"kubernetes.io/projected/af0f2741-8c09-4555-b8c4-251b9a2de57e-kube-api-access-fc7nk\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.486219 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-dns-swift-storage-0\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.486262 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-config-data\") pod \"barbican-api-5dcb766fd4-g29lg\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.486302 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-combined-ca-bundle\") pod \"barbican-api-5dcb766fd4-g29lg\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.486322 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa02fc34-2263-47cb-90cf-7baedb10be5e-logs\") pod \"barbican-api-5dcb766fd4-g29lg\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.486368 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-ovsdbserver-nb\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.486418 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-config-data-custom\") pod \"barbican-api-5dcb766fd4-g29lg\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.486449 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-config\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.486479 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-dns-svc\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.486494 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-ovsdbserver-sb\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.517413 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-ovsdbserver-sb\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.526747 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa02fc34-2263-47cb-90cf-7baedb10be5e-logs\") pod \"barbican-api-5dcb766fd4-g29lg\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.532731 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-config-data-custom\") pod \"barbican-api-5dcb766fd4-g29lg\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.546120 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-config\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.549391 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-ovsdbserver-nb\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.549505 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-dns-swift-storage-0\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.550225 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-dns-svc\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.556967 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-config-data\") pod \"barbican-api-5dcb766fd4-g29lg\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.566955 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-combined-ca-bundle\") pod \"barbican-api-5dcb766fd4-g29lg\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.568296 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkmx7\" (UniqueName: \"kubernetes.io/projected/aa02fc34-2263-47cb-90cf-7baedb10be5e-kube-api-access-lkmx7\") pod \"barbican-api-5dcb766fd4-g29lg\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.570368 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc7nk\" (UniqueName: \"kubernetes.io/projected/af0f2741-8c09-4555-b8c4-251b9a2de57e-kube-api-access-fc7nk\") pod \"dnsmasq-dns-9d5d45775-xwjff\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: W0121 17:52:45.631398 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc37f7c3a_832c_4991_9fe0_6e923befb599.slice/crio-8d078fdd9e88bb78776affbe4ab5502a9de93260576e69cee8b50a0cdd212cc1 WatchSource:0}: Error finding container 8d078fdd9e88bb78776affbe4ab5502a9de93260576e69cee8b50a0cdd212cc1: Status 404 returned error can't find the container with id 8d078fdd9e88bb78776affbe4ab5502a9de93260576e69cee8b50a0cdd212cc1
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.633574 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"]
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.762654 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9d5d45775-xwjff"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.763646 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5dcb766fd4-g29lg"
Jan 21 17:52:45 crc kubenswrapper[4799]: I0121 17:52:45.938682 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"]
Jan 21 17:52:45 crc kubenswrapper[4799]: E0121 17:52:45.949226 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2495bb43_91f2_4311_9703_6b80621c59b8.slice/crio-conmon-2f82bec28f3674b09d7b88205e36bf6634e9492c1bd9b3fa9b2e6a187f4e0dfe.scope\": RecentStats: unable to find data in memory cache]"
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.275295 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn"]
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.288832 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp"
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.393925 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bschv\" (UniqueName: \"kubernetes.io/projected/2495bb43-91f2-4311-9703-6b80621c59b8-kube-api-access-bschv\") pod \"2495bb43-91f2-4311-9703-6b80621c59b8\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") "
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.394319 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-dns-swift-storage-0\") pod \"2495bb43-91f2-4311-9703-6b80621c59b8\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") "
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.394376 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-config\") pod \"2495bb43-91f2-4311-9703-6b80621c59b8\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") "
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.394641 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-ovsdbserver-nb\") pod \"2495bb43-91f2-4311-9703-6b80621c59b8\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") "
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.394681 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-ovsdbserver-sb\") pod \"2495bb43-91f2-4311-9703-6b80621c59b8\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") "
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.394711 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-dns-svc\") pod \"2495bb43-91f2-4311-9703-6b80621c59b8\" (UID: \"2495bb43-91f2-4311-9703-6b80621c59b8\") "
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.407738 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2495bb43-91f2-4311-9703-6b80621c59b8-kube-api-access-bschv" (OuterVolumeSpecName: "kube-api-access-bschv") pod "2495bb43-91f2-4311-9703-6b80621c59b8" (UID: "2495bb43-91f2-4311-9703-6b80621c59b8"). InnerVolumeSpecName "kube-api-access-bschv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.412336 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"68d6d069-6c63-4dab-8664-e9474f3615bb","Type":"ContainerStarted","Data":"19f9a7487e24e08486f9dc1a797402525d7d89048b1a73fa1b6e2cdb207073d1"}
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.422092 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn" event={"ID":"9bb30a38-ea0d-4580-9a41-326f00b5c149","Type":"ContainerStarted","Data":"2ea78f65613f6eff4485590a5bd71c0faae533964326f263d74c378b6c04ecd1"}
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.429886 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"c37f7c3a-832c-4991-9fe0-6e923befb599","Type":"ContainerStarted","Data":"8d078fdd9e88bb78776affbe4ab5502a9de93260576e69cee8b50a0cdd212cc1"}
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.469217 4799 generic.go:334] "Generic (PLEG): container finished" podID="2495bb43-91f2-4311-9703-6b80621c59b8" containerID="2f82bec28f3674b09d7b88205e36bf6634e9492c1bd9b3fa9b2e6a187f4e0dfe" exitCode=0
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.469545 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" event={"ID":"2495bb43-91f2-4311-9703-6b80621c59b8","Type":"ContainerDied","Data":"2f82bec28f3674b09d7b88205e36bf6634e9492c1bd9b3fa9b2e6a187f4e0dfe"}
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.469657 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp" event={"ID":"2495bb43-91f2-4311-9703-6b80621c59b8","Type":"ContainerDied","Data":"1249d3154a473d5df91664a62ba0b6c23067c1a12ad19262bfe180c4293f22d4"}
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.469770 4799 scope.go:117] "RemoveContainer" containerID="2f82bec28f3674b09d7b88205e36bf6634e9492c1bd9b3fa9b2e6a187f4e0dfe"
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.470031 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-568c9b8bff-xjtgp"
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.490315 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-s94pl" event={"ID":"45457092-3e80-4528-99f1-b1f5f1c2f128","Type":"ContainerStarted","Data":"a82d0b436041595b747a1b7b33caff03aa3ff204c8ecbb0708b450b6a580973c"}
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.494018 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2495bb43-91f2-4311-9703-6b80621c59b8" (UID: "2495bb43-91f2-4311-9703-6b80621c59b8"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.496900 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bschv\" (UniqueName: \"kubernetes.io/projected/2495bb43-91f2-4311-9703-6b80621c59b8-kube-api-access-bschv\") on node \"crc\" DevicePath \"\""
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.497043 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.498222 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55f8df6d54-cffcw" event={"ID":"d03c23b6-50c8-4a4b-b2ea-53c4a3010790","Type":"ContainerStarted","Data":"91ce8819a2cc3bed75abeb4763369591d7c8bc5f398ef503543acaa42948507e"}
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.499398 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2495bb43-91f2-4311-9703-6b80621c59b8" (UID: "2495bb43-91f2-4311-9703-6b80621c59b8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.499977 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2495bb43-91f2-4311-9703-6b80621c59b8" (UID: "2495bb43-91f2-4311-9703-6b80621c59b8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.500406 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-86999674c5-gpgq6" event={"ID":"4cc3ff02-feee-4b55-a057-99380b99a10e","Type":"ContainerStarted","Data":"aa86018af1fb7a9af9a5f2d84251b454c2964588a1b052b4fe7eed14e0e29272"}
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.502631 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-86999674c5-gpgq6"
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.521491 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-config" (OuterVolumeSpecName: "config") pod "2495bb43-91f2-4311-9703-6b80621c59b8" (UID: "2495bb43-91f2-4311-9703-6b80621c59b8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.521793 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"e3555046-24d9-4700-bdb8-0a09c35f651a","Type":"ContainerStarted","Data":"c35795e1bae0c6341fcf99661e30668e0247e1b517364827c7fcb0ae934ea1b5"}
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.541050 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-86999674c5-gpgq6" podStartSLOduration=4.538503499 podStartE2EDuration="4.538503499s" podCreationTimestamp="2026-01-21 17:52:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:52:46.533931961 +0000 UTC m=+1193.160221984" watchObservedRunningTime="2026-01-21 17:52:46.538503499 +0000 UTC m=+1193.164793542"
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.594986 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2495bb43-91f2-4311-9703-6b80621c59b8" (UID: "2495bb43-91f2-4311-9703-6b80621c59b8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.602118 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.602207 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.602227 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.602239 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2495bb43-91f2-4311-9703-6b80621c59b8-config\") on node \"crc\" DevicePath \"\""
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.604172 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9d5d45775-xwjff"]
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.632743 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7f979ff5f7-qvdts"]
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.867509 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5dcb766fd4-g29lg"]
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.903178 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-568c9b8bff-xjtgp"]
Jan 21 17:52:46 crc kubenswrapper[4799]: I0121 17:52:46.929507 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-568c9b8bff-xjtgp"]
Jan 21 17:52:47 crc kubenswrapper[4799]: W0121 17:52:47.514473 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa02fc34_2263_47cb_90cf_7baedb10be5e.slice/crio-f73abbce1d2408b8c314c73e0c563a67e9d0f9f93afbdc86bf6949ccc9c61af9 WatchSource:0}: Error
finding container f73abbce1d2408b8c314c73e0c563a67e9d0f9f93afbdc86bf6949ccc9c61af9: Status 404 returned error can't find the container with id f73abbce1d2408b8c314c73e0c563a67e9d0f9f93afbdc86bf6949ccc9c61af9 Jan 21 17:52:47 crc kubenswrapper[4799]: I0121 17:52:47.524268 4799 scope.go:117] "RemoveContainer" containerID="d016b0a000036db1b5d678ac68d37a7c49d9138845b37d21149a3404c4f5cc83" Jan 21 17:52:47 crc kubenswrapper[4799]: I0121 17:52:47.549204 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dcb766fd4-g29lg" event={"ID":"aa02fc34-2263-47cb-90cf-7baedb10be5e","Type":"ContainerStarted","Data":"f73abbce1d2408b8c314c73e0c563a67e9d0f9f93afbdc86bf6949ccc9c61af9"} Jan 21 17:52:47 crc kubenswrapper[4799]: I0121 17:52:47.554538 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9d5d45775-xwjff" event={"ID":"af0f2741-8c09-4555-b8c4-251b9a2de57e","Type":"ContainerStarted","Data":"dfdc723537c25f4eb0734ddc187e1c54c08757a211fae9eeb91a82ec6144a6f8"} Jan 21 17:52:47 crc kubenswrapper[4799]: I0121 17:52:47.571356 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7f979ff5f7-qvdts" event={"ID":"c7dc5147-addd-46d9-b5b3-3f328c0a5a94","Type":"ContainerStarted","Data":"db03548f297a7afd647e8729ac422e116fe985aa461f8814bf04abaa18a57236"} Jan 21 17:52:47 crc kubenswrapper[4799]: I0121 17:52:47.627894 4799 scope.go:117] "RemoveContainer" containerID="2f82bec28f3674b09d7b88205e36bf6634e9492c1bd9b3fa9b2e6a187f4e0dfe" Jan 21 17:52:47 crc kubenswrapper[4799]: E0121 17:52:47.630678 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f82bec28f3674b09d7b88205e36bf6634e9492c1bd9b3fa9b2e6a187f4e0dfe\": container with ID starting with 2f82bec28f3674b09d7b88205e36bf6634e9492c1bd9b3fa9b2e6a187f4e0dfe not found: ID does not exist" containerID="2f82bec28f3674b09d7b88205e36bf6634e9492c1bd9b3fa9b2e6a187f4e0dfe" Jan 21 17:52:47 crc kubenswrapper[4799]: I0121 17:52:47.630751 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f82bec28f3674b09d7b88205e36bf6634e9492c1bd9b3fa9b2e6a187f4e0dfe"} err="failed to get container status \"2f82bec28f3674b09d7b88205e36bf6634e9492c1bd9b3fa9b2e6a187f4e0dfe\": rpc error: code = NotFound desc = could not find container \"2f82bec28f3674b09d7b88205e36bf6634e9492c1bd9b3fa9b2e6a187f4e0dfe\": container with ID starting with 2f82bec28f3674b09d7b88205e36bf6634e9492c1bd9b3fa9b2e6a187f4e0dfe not found: ID does not exist" Jan 21 17:52:47 crc kubenswrapper[4799]: I0121 17:52:47.630796 4799 scope.go:117] "RemoveContainer" containerID="d016b0a000036db1b5d678ac68d37a7c49d9138845b37d21149a3404c4f5cc83" Jan 21 17:52:47 crc kubenswrapper[4799]: E0121 17:52:47.637437 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d016b0a000036db1b5d678ac68d37a7c49d9138845b37d21149a3404c4f5cc83\": container with ID starting with d016b0a000036db1b5d678ac68d37a7c49d9138845b37d21149a3404c4f5cc83 not found: ID does not exist" containerID="d016b0a000036db1b5d678ac68d37a7c49d9138845b37d21149a3404c4f5cc83" Jan 21 17:52:47 crc kubenswrapper[4799]: I0121 17:52:47.637529 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d016b0a000036db1b5d678ac68d37a7c49d9138845b37d21149a3404c4f5cc83"} err="failed to get container status \"d016b0a000036db1b5d678ac68d37a7c49d9138845b37d21149a3404c4f5cc83\": 
rpc error: code = NotFound desc = could not find container \"d016b0a000036db1b5d678ac68d37a7c49d9138845b37d21149a3404c4f5cc83\": container with ID starting with d016b0a000036db1b5d678ac68d37a7c49d9138845b37d21149a3404c4f5cc83 not found: ID does not exist" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.225763 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2495bb43-91f2-4311-9703-6b80621c59b8" path="/var/lib/kubelet/pods/2495bb43-91f2-4311-9703-6b80621c59b8/volumes" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.598639 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"68d6d069-6c63-4dab-8664-e9474f3615bb","Type":"ContainerStarted","Data":"26bc35d63df5fd0e22af7d7ab2eaeb3a698196cd3595d2330f14569a0fc05e0f"} Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.600486 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.606512 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dcb766fd4-g29lg" event={"ID":"aa02fc34-2263-47cb-90cf-7baedb10be5e","Type":"ContainerStarted","Data":"04e72e40123c0bbcb35c145b03d0dd2cf941c3ae38d27d6d61ca9e3df78f2e21"} Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.639908 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=5.639885485 podStartE2EDuration="5.639885485s" podCreationTimestamp="2026-01-21 17:52:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:52:48.624050732 +0000 UTC m=+1195.250340755" watchObservedRunningTime="2026-01-21 17:52:48.639885485 +0000 UTC m=+1195.266175508" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.653352 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-55f8df6d54-cffcw" event={"ID":"d03c23b6-50c8-4a4b-b2ea-53c4a3010790","Type":"ContainerStarted","Data":"03a37a30587978fdcdbf5822392b445385a6f0cf4109c9be3a2770d21dcd4cbc"} Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.653739 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.653768 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.669487 4799 generic.go:334] "Generic (PLEG): container finished" podID="af0f2741-8c09-4555-b8c4-251b9a2de57e" containerID="e83eb221c56ae4873a8c5f4a34e4e1eed2f47fbf1c0d1c3d3e407ec8783d04e0" exitCode=0 Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.670349 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9d5d45775-xwjff" event={"ID":"af0f2741-8c09-4555-b8c4-251b9a2de57e","Type":"ContainerDied","Data":"e83eb221c56ae4873a8c5f4a34e4e1eed2f47fbf1c0d1c3d3e407ec8783d04e0"} Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.703566 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-55f8df6d54-cffcw" podStartSLOduration=6.703532515 podStartE2EDuration="6.703532515s" podCreationTimestamp="2026-01-21 17:52:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:52:48.693811283 +0000 UTC m=+1195.320101326" 
watchObservedRunningTime="2026-01-21 17:52:48.703532515 +0000 UTC m=+1195.329822568" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.714246 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.768990 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-s94pl" podStartSLOduration=8.234971547 podStartE2EDuration="48.768967165s" podCreationTimestamp="2026-01-21 17:52:00 +0000 UTC" firstStartedPulling="2026-01-21 17:52:01.949205405 +0000 UTC m=+1148.575495428" lastFinishedPulling="2026-01-21 17:52:42.483201023 +0000 UTC m=+1189.109491046" observedRunningTime="2026-01-21 17:52:48.750685394 +0000 UTC m=+1195.376975427" watchObservedRunningTime="2026-01-21 17:52:48.768967165 +0000 UTC m=+1195.395257188" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.821479 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-cfcccc69b-6zwk4"] Jan 21 17:52:48 crc kubenswrapper[4799]: E0121 17:52:48.821983 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2495bb43-91f2-4311-9703-6b80621c59b8" containerName="dnsmasq-dns" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.822017 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2495bb43-91f2-4311-9703-6b80621c59b8" containerName="dnsmasq-dns" Jan 21 17:52:48 crc kubenswrapper[4799]: E0121 17:52:48.822056 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2495bb43-91f2-4311-9703-6b80621c59b8" containerName="init" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.822062 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="2495bb43-91f2-4311-9703-6b80621c59b8" containerName="init" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.822296 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="2495bb43-91f2-4311-9703-6b80621c59b8" containerName="dnsmasq-dns" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.823587 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.833465 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.834084 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.877450 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-cfcccc69b-6zwk4"] Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.978071 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-combined-ca-bundle\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.978195 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-config-data\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.978223 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48tqg\" (UniqueName: \"kubernetes.io/projected/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-kube-api-access-48tqg\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.978499 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-logs\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.978770 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-public-tls-certs\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.978854 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-internal-tls-certs\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:48 crc kubenswrapper[4799]: I0121 17:52:48.978929 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-config-data-custom\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.147094 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-config-data\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.147209 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48tqg\" (UniqueName: \"kubernetes.io/projected/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-kube-api-access-48tqg\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.147243 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-logs\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.147339 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-public-tls-certs\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.147381 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-internal-tls-certs\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.147423 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-config-data-custom\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.147472 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-combined-ca-bundle\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.152652 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-logs\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.158863 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-internal-tls-certs\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.161518 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-combined-ca-bundle\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.161750 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-public-tls-certs\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.161837 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-config-data\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.191007 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-config-data-custom\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.192905 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48tqg\" (UniqueName: \"kubernetes.io/projected/f4c9e3bf-79dd-49d5-af90-db5a6087f0f3-kube-api-access-48tqg\") pod \"barbican-api-cfcccc69b-6zwk4\" (UID: \"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3\") " pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.484943 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.703249 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9d5d45775-xwjff" event={"ID":"af0f2741-8c09-4555-b8c4-251b9a2de57e","Type":"ContainerStarted","Data":"23932414f560b8204d12801ee82b40fe3fbc4348408ab863b26c4af0fd32d52a"} Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.703716 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-9d5d45775-xwjff" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.711913 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dcb766fd4-g29lg" event={"ID":"aa02fc34-2263-47cb-90cf-7baedb10be5e","Type":"ContainerStarted","Data":"61f209ff39eb7284a64f9fd5b461c8636003e84acec3f15340dbde5ba06129b5"} Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.712084 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5dcb766fd4-g29lg" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.712114 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5dcb766fd4-g29lg" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.748529 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-9d5d45775-xwjff" podStartSLOduration=5.748504403 podStartE2EDuration="5.748504403s" podCreationTimestamp="2026-01-21 17:52:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:52:49.724490811 +0000 UTC m=+1196.350780834" watchObservedRunningTime="2026-01-21 17:52:49.748504403 +0000 UTC m=+1196.374794426" Jan 21 17:52:49 crc kubenswrapper[4799]: I0121 17:52:49.775283 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5dcb766fd4-g29lg" podStartSLOduration=5.77521283 podStartE2EDuration="5.77521283s" podCreationTimestamp="2026-01-21 17:52:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:52:49.742602458 +0000 UTC m=+1196.368892481" watchObservedRunningTime="2026-01-21 17:52:49.77521283 +0000 UTC m=+1196.401502853" Jan 21 17:52:50 crc kubenswrapper[4799]: I0121 17:52:50.473186 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-cfcccc69b-6zwk4"] Jan 21 17:52:50 crc kubenswrapper[4799]: I0121 17:52:50.739943 4799 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 21 17:52:51 crc kubenswrapper[4799]: W0121 17:52:51.872996 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4c9e3bf_79dd_49d5_af90_db5a6087f0f3.slice/crio-f8d21e06b33cbd4bcf2ed4ee4215ac1d8261866b78cb0e106a1c671f6aa1a1a3 WatchSource:0}: Error finding container f8d21e06b33cbd4bcf2ed4ee4215ac1d8261866b78cb0e106a1c671f6aa1a1a3: Status 404 returned error can't find the container with id f8d21e06b33cbd4bcf2ed4ee4215ac1d8261866b78cb0e106a1c671f6aa1a1a3 Jan 21 17:52:52 crc kubenswrapper[4799]: I0121 17:52:52.479325 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Jan 21 17:52:52 crc kubenswrapper[4799]: I0121 17:52:52.644037 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:52 crc kubenswrapper[4799]: I0121 17:52:52.668883 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:52 crc kubenswrapper[4799]: I0121 17:52:52.765733 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-cfcccc69b-6zwk4" event={"ID":"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3","Type":"ContainerStarted","Data":"f8d21e06b33cbd4bcf2ed4ee4215ac1d8261866b78cb0e106a1c671f6aa1a1a3"} Jan 21 17:52:53 crc kubenswrapper[4799]: I0121 17:52:53.710520 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Jan 21 17:52:53 crc kubenswrapper[4799]: I0121 17:52:53.715960 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Jan 21 17:52:53 crc kubenswrapper[4799]: I0121 17:52:53.788514 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Jan 21 17:52:55 crc kubenswrapper[4799]: I0121 17:52:55.178391 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:52:55 crc kubenswrapper[4799]: I0121 17:52:55.365561 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-585ff694b6-5fph4" Jan 21 17:52:55 crc kubenswrapper[4799]: I0121 17:52:55.451020 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7d9c7df8bb-b2r9b"] Jan 21 17:52:55 crc kubenswrapper[4799]: I0121 17:52:55.765663 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-9d5d45775-xwjff" Jan 21 17:52:55 crc kubenswrapper[4799]: I0121 17:52:55.863786 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77f6cc8899-p9cjd"] Jan 21 17:52:55 crc kubenswrapper[4799]: I0121 17:52:55.864069 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" podUID="ec158420-9dbf-4413-bd52-5041a9cee032" containerName="dnsmasq-dns" containerID="cri-o://fb039c711b87ae36fb240da128277b0f719a0e4e12486da552724ebd7f6923c4" gracePeriod=10 Jan 21 17:52:55 crc kubenswrapper[4799]: I0121 17:52:55.880183 4799 generic.go:334] "Generic (PLEG): container finished" podID="482b08ae-060f-465a-9085-20d742c22a13" containerID="63f960c35f097c4f3ff07c7a6040984afd5aaa1799cd3cbad02d5e2b724834d1" exitCode=0 Jan 21 17:52:55 crc kubenswrapper[4799]: I0121 17:52:55.880240 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-74m2t" event={"ID":"482b08ae-060f-465a-9085-20d742c22a13","Type":"ContainerDied","Data":"63f960c35f097c4f3ff07c7a6040984afd5aaa1799cd3cbad02d5e2b724834d1"} Jan 21 17:52:55 crc kubenswrapper[4799]: I0121 17:52:55.880426 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7d9c7df8bb-b2r9b" podUID="fde84d23-f64f-4299-af94-1d29894acdc0" containerName="horizon-log" containerID="cri-o://11f21f7e5deaa70ee0d77740377532b2349b1405cb6eb0d5c203aacc4806a2a7" gracePeriod=30 Jan 21 17:52:55 crc kubenswrapper[4799]: I0121 17:52:55.880542 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7d9c7df8bb-b2r9b" podUID="fde84d23-f64f-4299-af94-1d29894acdc0" containerName="horizon" containerID="cri-o://e331da72beab4e6eb2351cf8e7e1bf76b4b5b46cb290cf8f998552993f9545c4" gracePeriod=30 Jan 21 17:52:56 crc 
kubenswrapper[4799]: E0121 17:52:56.251635 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod45457092_3e80_4528_99f1_b1f5f1c2f128.slice/crio-a82d0b436041595b747a1b7b33caff03aa3ff204c8ecbb0708b450b6a580973c.scope\": RecentStats: unable to find data in memory cache]" Jan 21 17:52:56 crc kubenswrapper[4799]: I0121 17:52:56.373057 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" podUID="ec158420-9dbf-4413-bd52-5041a9cee032" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.155:5353: connect: connection refused" Jan 21 17:52:56 crc kubenswrapper[4799]: I0121 17:52:56.899864 4799 generic.go:334] "Generic (PLEG): container finished" podID="45457092-3e80-4528-99f1-b1f5f1c2f128" containerID="a82d0b436041595b747a1b7b33caff03aa3ff204c8ecbb0708b450b6a580973c" exitCode=0 Jan 21 17:52:56 crc kubenswrapper[4799]: I0121 17:52:56.900228 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-s94pl" event={"ID":"45457092-3e80-4528-99f1-b1f5f1c2f128","Type":"ContainerDied","Data":"a82d0b436041595b747a1b7b33caff03aa3ff204c8ecbb0708b450b6a580973c"} Jan 21 17:52:56 crc kubenswrapper[4799]: I0121 17:52:56.905473 4799 generic.go:334] "Generic (PLEG): container finished" podID="ec158420-9dbf-4413-bd52-5041a9cee032" containerID="fb039c711b87ae36fb240da128277b0f719a0e4e12486da552724ebd7f6923c4" exitCode=0 Jan 21 17:52:56 crc kubenswrapper[4799]: I0121 17:52:56.905552 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" event={"ID":"ec158420-9dbf-4413-bd52-5041a9cee032","Type":"ContainerDied","Data":"fb039c711b87ae36fb240da128277b0f719a0e4e12486da552724ebd7f6923c4"} Jan 21 17:52:57 crc kubenswrapper[4799]: I0121 17:52:57.562499 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5dcb766fd4-g29lg" Jan 21 17:52:57 crc kubenswrapper[4799]: I0121 17:52:57.757421 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5dcb766fd4-g29lg" Jan 21 17:52:57 crc kubenswrapper[4799]: I0121 17:52:57.926374 4799 generic.go:334] "Generic (PLEG): container finished" podID="fde84d23-f64f-4299-af94-1d29894acdc0" containerID="e331da72beab4e6eb2351cf8e7e1bf76b4b5b46cb290cf8f998552993f9545c4" exitCode=0 Jan 21 17:52:57 crc kubenswrapper[4799]: I0121 17:52:57.926608 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d9c7df8bb-b2r9b" event={"ID":"fde84d23-f64f-4299-af94-1d29894acdc0","Type":"ContainerDied","Data":"e331da72beab4e6eb2351cf8e7e1bf76b4b5b46cb290cf8f998552993f9545c4"} Jan 21 17:52:58 crc kubenswrapper[4799]: I0121 17:52:58.434963 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Jan 21 17:52:58 crc kubenswrapper[4799]: I0121 17:52:58.435229 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="68d6d069-6c63-4dab-8664-e9474f3615bb" containerName="watcher-api-log" containerID="cri-o://19f9a7487e24e08486f9dc1a797402525d7d89048b1a73fa1b6e2cdb207073d1" gracePeriod=30 Jan 21 17:52:58 crc kubenswrapper[4799]: I0121 17:52:58.435331 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="68d6d069-6c63-4dab-8664-e9474f3615bb" containerName="watcher-api" 
containerID="cri-o://26bc35d63df5fd0e22af7d7ab2eaeb3a698196cd3595d2330f14569a0fc05e0f" gracePeriod=30 Jan 21 17:52:58 crc kubenswrapper[4799]: I0121 17:52:58.938619 4799 generic.go:334] "Generic (PLEG): container finished" podID="68d6d069-6c63-4dab-8664-e9474f3615bb" containerID="19f9a7487e24e08486f9dc1a797402525d7d89048b1a73fa1b6e2cdb207073d1" exitCode=143 Jan 21 17:52:58 crc kubenswrapper[4799]: I0121 17:52:58.938665 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"68d6d069-6c63-4dab-8664-e9474f3615bb","Type":"ContainerDied","Data":"19f9a7487e24e08486f9dc1a797402525d7d89048b1a73fa1b6e2cdb207073d1"} Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.295489 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="68d6d069-6c63-4dab-8664-e9474f3615bb" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.166:9322/\": read tcp 10.217.0.2:57658->10.217.0.166:9322: read: connection reset by peer" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.295491 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="68d6d069-6c63-4dab-8664-e9474f3615bb" containerName="watcher-api-log" probeResult="failure" output="Get \"http://10.217.0.166:9322/\": read tcp 10.217.0.2:57660->10.217.0.166:9322: read: connection reset by peer" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.715155 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-74m2t" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.719218 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.722449 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8gsc\" (UniqueName: \"kubernetes.io/projected/45457092-3e80-4528-99f1-b1f5f1c2f128-kube-api-access-f8gsc\") pod \"45457092-3e80-4528-99f1-b1f5f1c2f128\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.722495 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gbpn\" (UniqueName: \"kubernetes.io/projected/482b08ae-060f-465a-9085-20d742c22a13-kube-api-access-6gbpn\") pod \"482b08ae-060f-465a-9085-20d742c22a13\" (UID: \"482b08ae-060f-465a-9085-20d742c22a13\") " Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.722531 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-config-data\") pod \"482b08ae-060f-465a-9085-20d742c22a13\" (UID: \"482b08ae-060f-465a-9085-20d742c22a13\") " Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.722550 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-scripts\") pod \"45457092-3e80-4528-99f1-b1f5f1c2f128\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.722570 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-db-sync-config-data\") pod \"45457092-3e80-4528-99f1-b1f5f1c2f128\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " Jan 21 17:52:59 
crc kubenswrapper[4799]: I0121 17:52:59.722661 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-db-sync-config-data\") pod \"482b08ae-060f-465a-9085-20d742c22a13\" (UID: \"482b08ae-060f-465a-9085-20d742c22a13\") " Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.722734 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-config-data\") pod \"45457092-3e80-4528-99f1-b1f5f1c2f128\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.722757 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/45457092-3e80-4528-99f1-b1f5f1c2f128-etc-machine-id\") pod \"45457092-3e80-4528-99f1-b1f5f1c2f128\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.722814 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-combined-ca-bundle\") pod \"45457092-3e80-4528-99f1-b1f5f1c2f128\" (UID: \"45457092-3e80-4528-99f1-b1f5f1c2f128\") " Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.726067 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45457092-3e80-4528-99f1-b1f5f1c2f128-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "45457092-3e80-4528-99f1-b1f5f1c2f128" (UID: "45457092-3e80-4528-99f1-b1f5f1c2f128"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.732752 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "482b08ae-060f-465a-9085-20d742c22a13" (UID: "482b08ae-060f-465a-9085-20d742c22a13"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.735007 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "45457092-3e80-4528-99f1-b1f5f1c2f128" (UID: "45457092-3e80-4528-99f1-b1f5f1c2f128"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.738870 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-scripts" (OuterVolumeSpecName: "scripts") pod "45457092-3e80-4528-99f1-b1f5f1c2f128" (UID: "45457092-3e80-4528-99f1-b1f5f1c2f128"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.739055 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45457092-3e80-4528-99f1-b1f5f1c2f128-kube-api-access-f8gsc" (OuterVolumeSpecName: "kube-api-access-f8gsc") pod "45457092-3e80-4528-99f1-b1f5f1c2f128" (UID: "45457092-3e80-4528-99f1-b1f5f1c2f128"). 
InnerVolumeSpecName "kube-api-access-f8gsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.743699 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/482b08ae-060f-465a-9085-20d742c22a13-kube-api-access-6gbpn" (OuterVolumeSpecName: "kube-api-access-6gbpn") pod "482b08ae-060f-465a-9085-20d742c22a13" (UID: "482b08ae-060f-465a-9085-20d742c22a13"). InnerVolumeSpecName "kube-api-access-6gbpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.814097 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "45457092-3e80-4528-99f1-b1f5f1c2f128" (UID: "45457092-3e80-4528-99f1-b1f5f1c2f128"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.825802 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-combined-ca-bundle\") pod \"482b08ae-060f-465a-9085-20d742c22a13\" (UID: \"482b08ae-060f-465a-9085-20d742c22a13\") " Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.826326 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.826352 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8gsc\" (UniqueName: \"kubernetes.io/projected/45457092-3e80-4528-99f1-b1f5f1c2f128-kube-api-access-f8gsc\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.826367 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gbpn\" (UniqueName: \"kubernetes.io/projected/482b08ae-060f-465a-9085-20d742c22a13-kube-api-access-6gbpn\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.826380 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.826393 4799 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.826403 4799 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.826414 4799 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/45457092-3e80-4528-99f1-b1f5f1c2f128-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.833868 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-config-data" (OuterVolumeSpecName: "config-data") pod 
"482b08ae-060f-465a-9085-20d742c22a13" (UID: "482b08ae-060f-465a-9085-20d742c22a13"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.858457 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "482b08ae-060f-465a-9085-20d742c22a13" (UID: "482b08ae-060f-465a-9085-20d742c22a13"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.867608 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-config-data" (OuterVolumeSpecName: "config-data") pod "45457092-3e80-4528-99f1-b1f5f1c2f128" (UID: "45457092-3e80-4528-99f1-b1f5f1c2f128"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.928088 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.928156 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45457092-3e80-4528-99f1-b1f5f1c2f128-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.928165 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/482b08ae-060f-465a-9085-20d742c22a13-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.955321 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-74m2t" event={"ID":"482b08ae-060f-465a-9085-20d742c22a13","Type":"ContainerDied","Data":"c4c9980e58390a4f0beb925be2c90ea7f02e24562f01b3d94ad8d70f1ea3d57c"} Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.955370 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c4c9980e58390a4f0beb925be2c90ea7f02e24562f01b3d94ad8d70f1ea3d57c" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.955456 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-74m2t" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.987891 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-s94pl" Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.987881 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-s94pl" event={"ID":"45457092-3e80-4528-99f1-b1f5f1c2f128","Type":"ContainerDied","Data":"cb663c37995d672b7288f14fb9bff80aea8cef394c7ac671b8f025dcb343d9b5"} Jan 21 17:52:59 crc kubenswrapper[4799]: I0121 17:52:59.987942 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb663c37995d672b7288f14fb9bff80aea8cef394c7ac671b8f025dcb343d9b5" Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.001617 4799 generic.go:334] "Generic (PLEG): container finished" podID="68d6d069-6c63-4dab-8664-e9474f3615bb" containerID="26bc35d63df5fd0e22af7d7ab2eaeb3a698196cd3595d2330f14569a0fc05e0f" exitCode=0 Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.017321 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"68d6d069-6c63-4dab-8664-e9474f3615bb","Type":"ContainerDied","Data":"26bc35d63df5fd0e22af7d7ab2eaeb3a698196cd3595d2330f14569a0fc05e0f"} Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.114002 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7d9c7df8bb-b2r9b" podUID="fde84d23-f64f-4299-af94-1d29894acdc0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.158:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.158:8443: connect: connection refused" Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.828440 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.834913 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.981916 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-ovsdbserver-sb\") pod \"ec158420-9dbf-4413-bd52-5041a9cee032\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.981981 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68d6d069-6c63-4dab-8664-e9474f3615bb-logs\") pod \"68d6d069-6c63-4dab-8664-e9474f3615bb\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.982085 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-dns-svc\") pod \"ec158420-9dbf-4413-bd52-5041a9cee032\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.982169 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-config\") pod \"ec158420-9dbf-4413-bd52-5041a9cee032\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.982221 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-config-data\") pod \"68d6d069-6c63-4dab-8664-e9474f3615bb\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.982245 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-dns-swift-storage-0\") pod \"ec158420-9dbf-4413-bd52-5041a9cee032\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.982268 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-ovsdbserver-nb\") pod \"ec158420-9dbf-4413-bd52-5041a9cee032\" (UID: \"ec158420-9dbf-4413-bd52-5041a9cee032\") " Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.982317 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zf49l\" (UniqueName: \"kubernetes.io/projected/68d6d069-6c63-4dab-8664-e9474f3615bb-kube-api-access-zf49l\") pod \"68d6d069-6c63-4dab-8664-e9474f3615bb\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.982380 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-custom-prometheus-ca\") pod \"68d6d069-6c63-4dab-8664-e9474f3615bb\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.982396 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nj992\" (UniqueName: \"kubernetes.io/projected/ec158420-9dbf-4413-bd52-5041a9cee032-kube-api-access-nj992\") pod \"ec158420-9dbf-4413-bd52-5041a9cee032\" (UID: 
\"ec158420-9dbf-4413-bd52-5041a9cee032\") " Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.982442 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-combined-ca-bundle\") pod \"68d6d069-6c63-4dab-8664-e9474f3615bb\" (UID: \"68d6d069-6c63-4dab-8664-e9474f3615bb\") " Jan 21 17:53:00 crc kubenswrapper[4799]: I0121 17:53:00.983973 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68d6d069-6c63-4dab-8664-e9474f3615bb-logs" (OuterVolumeSpecName: "logs") pod "68d6d069-6c63-4dab-8664-e9474f3615bb" (UID: "68d6d069-6c63-4dab-8664-e9474f3615bb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.008264 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec158420-9dbf-4413-bd52-5041a9cee032-kube-api-access-nj992" (OuterVolumeSpecName: "kube-api-access-nj992") pod "ec158420-9dbf-4413-bd52-5041a9cee032" (UID: "ec158420-9dbf-4413-bd52-5041a9cee032"). InnerVolumeSpecName "kube-api-access-nj992". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.011283 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68d6d069-6c63-4dab-8664-e9474f3615bb-kube-api-access-zf49l" (OuterVolumeSpecName: "kube-api-access-zf49l") pod "68d6d069-6c63-4dab-8664-e9474f3615bb" (UID: "68d6d069-6c63-4dab-8664-e9474f3615bb"). InnerVolumeSpecName "kube-api-access-zf49l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.088850 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd" event={"ID":"ec158420-9dbf-4413-bd52-5041a9cee032","Type":"ContainerDied","Data":"c244a1761a75df522824f1e30ce504e8a06dfe5f72536f066c8e3415c36a5a82"} Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.088906 4799 scope.go:117] "RemoveContainer" containerID="fb039c711b87ae36fb240da128277b0f719a0e4e12486da552724ebd7f6923c4" Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.089045 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77f6cc8899-p9cjd"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.094550 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 21 17:53:01 crc kubenswrapper[4799]: E0121 17:53:01.095067 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec158420-9dbf-4413-bd52-5041a9cee032" containerName="dnsmasq-dns"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.095083 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec158420-9dbf-4413-bd52-5041a9cee032" containerName="dnsmasq-dns"
Jan 21 17:53:01 crc kubenswrapper[4799]: E0121 17:53:01.095097 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45457092-3e80-4528-99f1-b1f5f1c2f128" containerName="cinder-db-sync"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.095103 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="45457092-3e80-4528-99f1-b1f5f1c2f128" containerName="cinder-db-sync"
Jan 21 17:53:01 crc kubenswrapper[4799]: E0121 17:53:01.095114 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="482b08ae-060f-465a-9085-20d742c22a13" containerName="glance-db-sync"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.095121 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="482b08ae-060f-465a-9085-20d742c22a13" containerName="glance-db-sync"
Jan 21 17:53:01 crc kubenswrapper[4799]: E0121 17:53:01.095155 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68d6d069-6c63-4dab-8664-e9474f3615bb" containerName="watcher-api"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.095161 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68d6d069-6c63-4dab-8664-e9474f3615bb" containerName="watcher-api"
Jan 21 17:53:01 crc kubenswrapper[4799]: E0121 17:53:01.095199 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec158420-9dbf-4413-bd52-5041a9cee032" containerName="init"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.095205 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec158420-9dbf-4413-bd52-5041a9cee032" containerName="init"
Jan 21 17:53:01 crc kubenswrapper[4799]: E0121 17:53:01.095228 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68d6d069-6c63-4dab-8664-e9474f3615bb" containerName="watcher-api-log"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.095234 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="68d6d069-6c63-4dab-8664-e9474f3615bb" containerName="watcher-api-log"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.095406 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="482b08ae-060f-465a-9085-20d742c22a13" containerName="glance-db-sync"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.095416 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68d6d069-6c63-4dab-8664-e9474f3615bb" containerName="watcher-api"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.095433 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="68d6d069-6c63-4dab-8664-e9474f3615bb" containerName="watcher-api-log"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.095442 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="45457092-3e80-4528-99f1-b1f5f1c2f128" containerName="cinder-db-sync"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.095456 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec158420-9dbf-4413-bd52-5041a9cee032" containerName="dnsmasq-dns"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.102018 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "68d6d069-6c63-4dab-8664-e9474f3615bb" (UID: "68d6d069-6c63-4dab-8664-e9474f3615bb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.104054 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf49l\" (UniqueName: \"kubernetes.io/projected/68d6d069-6c63-4dab-8664-e9474f3615bb-kube-api-access-zf49l\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.104078 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nj992\" (UniqueName: \"kubernetes.io/projected/ec158420-9dbf-4413-bd52-5041a9cee032-kube-api-access-nj992\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.104088 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.104099 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68d6d069-6c63-4dab-8664-e9474f3615bb-logs\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.116269 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.138677 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ec158420-9dbf-4413-bd52-5041a9cee032" (UID: "ec158420-9dbf-4413-bd52-5041a9cee032"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.138995 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.139479 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.140102 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-zmv4p"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.141163 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.146979 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "68d6d069-6c63-4dab-8664-e9474f3615bb" (UID: "68d6d069-6c63-4dab-8664-e9474f3615bb"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.156716 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"68d6d069-6c63-4dab-8664-e9474f3615bb","Type":"ContainerDied","Data":"49e9584ccaf705b302551ae75a1ffc855b8a214a8b43f49c629e31b2d76d2fa5"}
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.156846 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.179651 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.217993 4799 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-custom-prometheus-ca\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.218038 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.243718 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ec158420-9dbf-4413-bd52-5041a9cee032" (UID: "ec158420-9dbf-4413-bd52-5041a9cee032"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.245828 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ec158420-9dbf-4413-bd52-5041a9cee032" (UID: "ec158420-9dbf-4413-bd52-5041a9cee032"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.248444 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ec158420-9dbf-4413-bd52-5041a9cee032" (UID: "ec158420-9dbf-4413-bd52-5041a9cee032"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.272249 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-config-data" (OuterVolumeSpecName: "config-data") pod "68d6d069-6c63-4dab-8664-e9474f3615bb" (UID: "68d6d069-6c63-4dab-8664-e9474f3615bb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.289897 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-config" (OuterVolumeSpecName: "config") pod "ec158420-9dbf-4413-bd52-5041a9cee032" (UID: "ec158420-9dbf-4413-bd52-5041a9cee032"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.306589 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b9fc6d799-4hmz9"]
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.311438 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.333175 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zq4l2\" (UniqueName: \"kubernetes.io/projected/744eebc5-bc8e-4aaa-9165-35498dadc94a-kube-api-access-zq4l2\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.333304 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-config-data\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.333532 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.333577 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/744eebc5-bc8e-4aaa-9165-35498dadc94a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.333623 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-scripts\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.333762 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.336950 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.336984 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-config\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.336999 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68d6d069-6c63-4dab-8664-e9474f3615bb-config-data\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.337013 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.337030 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec158420-9dbf-4413-bd52-5041a9cee032-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.343812 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b9fc6d799-4hmz9"]
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.364836 4799 scope.go:117] "RemoveContainer" containerID="1450769535c3b6f9aa5cdfcf9617407249d96861431df10db7bd894c258ba4f0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.439832 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-ovsdbserver-nb\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.439900 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zq4l2\" (UniqueName: \"kubernetes.io/projected/744eebc5-bc8e-4aaa-9165-35498dadc94a-kube-api-access-zq4l2\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.439932 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-ovsdbserver-sb\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.439964 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-config-data\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.440067 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.440103 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/744eebc5-bc8e-4aaa-9165-35498dadc94a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.440172 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-scripts\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.440214 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-config\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.440247 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvd7j\" (UniqueName: \"kubernetes.io/projected/86219100-858f-44e7-930c-fdd9f00e2c25-kube-api-access-jvd7j\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.440289 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.440318 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-dns-swift-storage-0\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.440436 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-dns-svc\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.449693 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/744eebc5-bc8e-4aaa-9165-35498dadc94a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.466725 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b9fc6d799-4hmz9"]
Jan 21 17:53:01 crc kubenswrapper[4799]: E0121 17:53:01.467882 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc dns-swift-storage-0 kube-api-access-jvd7j ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9" podUID="86219100-858f-44e7-930c-fdd9f00e2c25"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.478350 4799 scope.go:117] "RemoveContainer" containerID="26bc35d63df5fd0e22af7d7ab2eaeb3a698196cd3595d2330f14569a0fc05e0f"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.499700 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-config-data\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.500961 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.511928 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.512378 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-scripts\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.516881 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zq4l2\" (UniqueName: \"kubernetes.io/projected/744eebc5-bc8e-4aaa-9165-35498dadc94a-kube-api-access-zq4l2\") pod \"cinder-scheduler-0\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.546652 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-config\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.546733 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvd7j\" (UniqueName: \"kubernetes.io/projected/86219100-858f-44e7-930c-fdd9f00e2c25-kube-api-access-jvd7j\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.546783 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-dns-swift-storage-0\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.546874 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-dns-svc\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.546968 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-ovsdbserver-nb\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.547005 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-ovsdbserver-sb\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.548206 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-ovsdbserver-sb\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.548884 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-config\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.549794 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-dns-svc\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.550544 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-dns-swift-storage-0\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.560324 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-ovsdbserver-nb\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.600961 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvd7j\" (UniqueName: \"kubernetes.io/projected/86219100-858f-44e7-930c-fdd9f00e2c25-kube-api-access-jvd7j\") pod \"dnsmasq-dns-b9fc6d799-4hmz9\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") " pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.627764 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77f6cc8899-p9cjd"]
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.698684 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77f6cc8899-p9cjd"]
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.728553 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79d9d747c5-mfvl5"]
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.734649 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79d9d747c5-mfvl5"]
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.734778 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.757398 4799 scope.go:117] "RemoveContainer" containerID="19f9a7487e24e08486f9dc1a797402525d7d89048b1a73fa1b6e2cdb207073d1"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.758425 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.800930 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"]
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.844373 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-api-0"]
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.863675 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhjl5\" (UniqueName: \"kubernetes.io/projected/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-kube-api-access-nhjl5\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.863765 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-dns-svc\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.863854 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-dns-swift-storage-0\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.863892 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-config\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.863947 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-ovsdbserver-nb\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.863965 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-ovsdbserver-sb\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.864107 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"]
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.899849 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.909242 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.911170 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-internal-svc"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.912353 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.939141 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-public-svc"
Jan 21 17:53:01 crc kubenswrapper[4799]: I0121 17:53:01.993990 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-config\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.010520 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-ovsdbserver-nb\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.010604 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-ovsdbserver-sb\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.010945 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhjl5\" (UniqueName: \"kubernetes.io/projected/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-kube-api-access-nhjl5\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.011008 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-dns-svc\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.011052 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-dns-swift-storage-0\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.013321 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-dns-swift-storage-0\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.035058 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-ovsdbserver-nb\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.036038 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-config\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.047786 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-ovsdbserver-sb\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.051058 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-dns-svc\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.054680 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"]
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.054802 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.057826 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.068862 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhjl5\" (UniqueName: \"kubernetes.io/projected/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-kube-api-access-nhjl5\") pod \"dnsmasq-dns-79d9d747c5-mfvl5\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.125527 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/40368171-ea6d-4ab3-a1de-33204529aab4-public-tls-certs\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.125690 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cs8dg\" (UniqueName: \"kubernetes.io/projected/40368171-ea6d-4ab3-a1de-33204529aab4-kube-api-access-cs8dg\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.125902 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/40368171-ea6d-4ab3-a1de-33204529aab4-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.125948 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/40368171-ea6d-4ab3-a1de-33204529aab4-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.125967 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40368171-ea6d-4ab3-a1de-33204529aab4-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.126011 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40368171-ea6d-4ab3-a1de-33204529aab4-config-data\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.126100 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40368171-ea6d-4ab3-a1de-33204529aab4-logs\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.139300 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.209589 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.234801 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/40368171-ea6d-4ab3-a1de-33204529aab4-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.234870 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-scripts\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.234897 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-config-data\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.234919 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/40368171-ea6d-4ab3-a1de-33204529aab4-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.234939 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40368171-ea6d-4ab3-a1de-33204529aab4-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.234980 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40368171-ea6d-4ab3-a1de-33204529aab4-config-data\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.235028 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-config-data-custom\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.235090 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40368171-ea6d-4ab3-a1de-33204529aab4-logs\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.235196 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/366d7f87-d059-4950-9c11-3c4cd4d64a58-logs\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.235237 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/40368171-ea6d-4ab3-a1de-33204529aab4-public-tls-certs\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.235283 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnzpl\" (UniqueName: \"kubernetes.io/projected/366d7f87-d059-4950-9c11-3c4cd4d64a58-kube-api-access-mnzpl\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.235371 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cs8dg\" (UniqueName: \"kubernetes.io/projected/40368171-ea6d-4ab3-a1de-33204529aab4-kube-api-access-cs8dg\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.235408 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.235527 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/366d7f87-d059-4950-9c11-3c4cd4d64a58-etc-machine-id\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.239660 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40368171-ea6d-4ab3-a1de-33204529aab4-logs\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.248001 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68d6d069-6c63-4dab-8664-e9474f3615bb" path="/var/lib/kubelet/pods/68d6d069-6c63-4dab-8664-e9474f3615bb/volumes"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.249083 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/40368171-ea6d-4ab3-a1de-33204529aab4-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.250890 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec158420-9dbf-4413-bd52-5041a9cee032" path="/var/lib/kubelet/pods/ec158420-9dbf-4413-bd52-5041a9cee032/volumes"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.250976 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.259809 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/40368171-ea6d-4ab3-a1de-33204529aab4-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.265838 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cs8dg\" (UniqueName: \"kubernetes.io/projected/40368171-ea6d-4ab3-a1de-33204529aab4-kube-api-access-cs8dg\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.276150 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/40368171-ea6d-4ab3-a1de-33204529aab4-public-tls-certs\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.276940 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40368171-ea6d-4ab3-a1de-33204529aab4-config-data\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.281915 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40368171-ea6d-4ab3-a1de-33204529aab4-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"40368171-ea6d-4ab3-a1de-33204529aab4\") " pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.301340 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.311394 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.338016 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvd7j\" (UniqueName: \"kubernetes.io/projected/86219100-858f-44e7-930c-fdd9f00e2c25-kube-api-access-jvd7j\") pod \"86219100-858f-44e7-930c-fdd9f00e2c25\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") "
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.338064 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-ovsdbserver-nb\") pod \"86219100-858f-44e7-930c-fdd9f00e2c25\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") "
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.338823 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "86219100-858f-44e7-930c-fdd9f00e2c25" (UID: "86219100-858f-44e7-930c-fdd9f00e2c25"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.342806 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "86219100-858f-44e7-930c-fdd9f00e2c25" (UID: "86219100-858f-44e7-930c-fdd9f00e2c25"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.342222 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-dns-swift-storage-0\") pod \"86219100-858f-44e7-930c-fdd9f00e2c25\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") "
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.343288 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-ovsdbserver-sb\") pod \"86219100-858f-44e7-930c-fdd9f00e2c25\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") "
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.343388 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-dns-svc\") pod \"86219100-858f-44e7-930c-fdd9f00e2c25\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") "
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.343412 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-config\") pod \"86219100-858f-44e7-930c-fdd9f00e2c25\" (UID: \"86219100-858f-44e7-930c-fdd9f00e2c25\") "
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.343897 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/366d7f87-d059-4950-9c11-3c4cd4d64a58-logs\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.343940 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnzpl\" (UniqueName: \"kubernetes.io/projected/366d7f87-d059-4950-9c11-3c4cd4d64a58-kube-api-access-mnzpl\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.344037 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.344158 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/366d7f87-d059-4950-9c11-3c4cd4d64a58-etc-machine-id\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.344258 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-scripts\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.344281 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-config-data\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.344335 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-config-data-custom\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.344495 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/366d7f87-d059-4950-9c11-3c4cd4d64a58-etc-machine-id\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.344700 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/366d7f87-d059-4950-9c11-3c4cd4d64a58-logs\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.344942 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "86219100-858f-44e7-930c-fdd9f00e2c25" (UID: "86219100-858f-44e7-930c-fdd9f00e2c25"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.345431 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-config" (OuterVolumeSpecName: "config") pod "86219100-858f-44e7-930c-fdd9f00e2c25" (UID: "86219100-858f-44e7-930c-fdd9f00e2c25"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.345655 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "86219100-858f-44e7-930c-fdd9f00e2c25" (UID: "86219100-858f-44e7-930c-fdd9f00e2c25"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.348632 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.348711 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.352391 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-config-data-custom\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.361235 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86219100-858f-44e7-930c-fdd9f00e2c25-kube-api-access-jvd7j" (OuterVolumeSpecName: "kube-api-access-jvd7j") pod "86219100-858f-44e7-930c-fdd9f00e2c25" (UID: "86219100-858f-44e7-930c-fdd9f00e2c25"). InnerVolumeSpecName "kube-api-access-jvd7j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.361923 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.365005 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnzpl\" (UniqueName: \"kubernetes.io/projected/366d7f87-d059-4950-9c11-3c4cd4d64a58-kube-api-access-mnzpl\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.365323 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-config-data\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.374143 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-scripts\") pod \"cinder-api-0\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.410566 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.412764 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.418381 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.418414 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-hhc5h"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.419384 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.427836 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.528660 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.530454 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.530472 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.530481 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86219100-858f-44e7-930c-fdd9f00e2c25-config\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.530494 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvd7j\" (UniqueName: \"kubernetes.io/projected/86219100-858f-44e7-930c-fdd9f00e2c25-kube-api-access-jvd7j\") on node \"crc\" DevicePath \"\""
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.624978 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.629089 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.632263 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.632398 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/64d79edc-abb7-410f-b687-f102bfc189d1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.632442 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64d79edc-abb7-410f-b687-f102bfc189d1-logs\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.632480 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcfd4\" (UniqueName: \"kubernetes.io/projected/64d79edc-abb7-410f-b687-f102bfc189d1-kube-api-access-xcfd4\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.632535 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.632623 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-config-data\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.632684 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-scripts\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.641242 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.655820 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.740706 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-config-data\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.740814 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-scripts\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.740901 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.740940 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.741021 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44cw2\" (UniqueName: \"kubernetes.io/projected/8c8a46b8-4c1b-413d-a085-fa3994505174-kube-api-access-44cw2\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.741096 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/64d79edc-abb7-410f-b687-f102bfc189d1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.741155 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.741242 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.741271 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64d79edc-abb7-410f-b687-f102bfc189d1-logs\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.741315 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcfd4\" (UniqueName: \"kubernetes.io/projected/64d79edc-abb7-410f-b687-f102bfc189d1-kube-api-access-xcfd4\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.741409 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8c8a46b8-4c1b-413d-a085-fa3994505174-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.741463 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c8a46b8-4c1b-413d-a085-fa3994505174-logs\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.741527 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.741570 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.743066 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/64d79edc-abb7-410f-b687-f102bfc189d1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.744649 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64d79edc-abb7-410f-b687-f102bfc189d1-logs\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.745011 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.756660 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-config-data\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.772708 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-scripts\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0"
Jan 21 17:53:02 crc
kubenswrapper[4799]: I0121 17:53:02.772702 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.778929 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcfd4\" (UniqueName: \"kubernetes.io/projected/64d79edc-abb7-410f-b687-f102bfc189d1-kube-api-access-xcfd4\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.843558 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.844945 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.845078 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44cw2\" (UniqueName: \"kubernetes.io/projected/8c8a46b8-4c1b-413d-a085-fa3994505174-kube-api-access-44cw2\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.845220 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.845273 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.845349 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8c8a46b8-4c1b-413d-a085-fa3994505174-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.845382 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c8a46b8-4c1b-413d-a085-fa3994505174-logs\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.845462 4799 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.849567 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8c8a46b8-4c1b-413d-a085-fa3994505174-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.849817 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c8a46b8-4c1b-413d-a085-fa3994505174-logs\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.849844 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.853427 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.860261 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.864881 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.939197 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44cw2\" (UniqueName: \"kubernetes.io/projected/8c8a46b8-4c1b-413d-a085-fa3994505174-kube-api-access-44cw2\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.962922 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 21 17:53:02 crc kubenswrapper[4799]: I0121 17:53:02.982133 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.017019 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " 
pod="openstack/glance-default-internal-api-0" Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.143827 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.230781 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.270527 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn" event={"ID":"9bb30a38-ea0d-4580-9a41-326f00b5c149","Type":"ContainerStarted","Data":"a4ac6ea8bc65801d185a2b1e62d9a694880d329e6e3d66d1f43de7238147a937"} Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.294781 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"c37f7c3a-832c-4991-9fe0-6e923befb599","Type":"ContainerStarted","Data":"4939d552bcb4fe0a85e289b6a186d3894554241f8c5736d8f5c81d88871989b9"} Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.303780 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7f979ff5f7-qvdts" event={"ID":"c7dc5147-addd-46d9-b5b3-3f328c0a5a94","Type":"ContainerStarted","Data":"08334718df43972a5c7b8a4441f6f3735c17cbb10855a1b04f99bbcb16c3dfed"} Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.310306 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.310384 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"744eebc5-bc8e-4aaa-9165-35498dadc94a","Type":"ContainerStarted","Data":"0d4b815aa0764eaa650bda10bd507ece045682da60b40e13119c26f357fbb95d"} Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.318803 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"e3555046-24d9-4700-bdb8-0a09c35f651a","Type":"ContainerStarted","Data":"feb17023cb77e67cafee7aaedbb3379b33fc70a5c6313f02972807ffc7e15f7f"} Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.323102 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-cfcccc69b-6zwk4" event={"ID":"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3","Type":"ContainerStarted","Data":"961a919a6986b7eeeafb39012a8ee9656c5d9140105389844fad3792b03659d6"} Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.326849 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-applier-0" podStartSLOduration=5.486150545 podStartE2EDuration="20.326825528s" podCreationTimestamp="2026-01-21 17:52:43 +0000 UTC" firstStartedPulling="2026-01-21 17:52:45.809717636 +0000 UTC m=+1192.436007659" lastFinishedPulling="2026-01-21 17:53:00.650392619 +0000 UTC m=+1207.276682642" observedRunningTime="2026-01-21 17:53:03.318596008 +0000 UTC m=+1209.944886051" watchObservedRunningTime="2026-01-21 17:53:03.326825528 +0000 UTC m=+1209.953115551" Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.330238 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b9fc6d799-4hmz9" Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.372870 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=5.829978222 podStartE2EDuration="20.372848195s" podCreationTimestamp="2026-01-21 17:52:43 +0000 UTC" firstStartedPulling="2026-01-21 17:52:46.109895462 +0000 UTC m=+1192.736185485" lastFinishedPulling="2026-01-21 17:53:00.652765415 +0000 UTC m=+1207.279055458" observedRunningTime="2026-01-21 17:53:03.351862708 +0000 UTC m=+1209.978152751" watchObservedRunningTime="2026-01-21 17:53:03.372848195 +0000 UTC m=+1209.999138218" Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.486178 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b9fc6d799-4hmz9"] Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.543213 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b9fc6d799-4hmz9"] Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.561292 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.613535 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79d9d747c5-mfvl5"] Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.827370 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.903263 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-applier-0" Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.903630 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-applier-0" Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.994627 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Jan 21 17:53:03 crc kubenswrapper[4799]: I0121 17:53:03.995739 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-applier-0" Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.228316 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86219100-858f-44e7-930c-fdd9f00e2c25" path="/var/lib/kubelet/pods/86219100-858f-44e7-930c-fdd9f00e2c25/volumes" Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.433788 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.461564 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerName="ceilometer-central-agent" containerID="cri-o://ab1356f09185e5bcd03fc2bf91986077d64da5b25ade94ba4110f03988ec963f" gracePeriod=30 Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.461703 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerName="proxy-httpd" containerID="cri-o://b03bdbe200981c553eeefe9cf4d3dc8468454e2e651c397a5bb471b84255b29f" gracePeriod=30 Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.461758 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerName="sg-core" 
containerID="cri-o://5db2fe2be2fee23676dafc35c5104e8f1d5377f39bc33ba233590f44fc108599" gracePeriod=30 Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.461764 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4b8ffb2a-2019-41d5-a0fb-c05199bcc230","Type":"ContainerStarted","Data":"b03bdbe200981c553eeefe9cf4d3dc8468454e2e651c397a5bb471b84255b29f"} Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.461806 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerName="ceilometer-notification-agent" containerID="cri-o://8f23bc960d06b776a628754f163280218abe7c353c40a867d70e231f9308d1ec" gracePeriod=30 Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.461815 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.518053 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=5.522363018 podStartE2EDuration="1m4.518031496s" podCreationTimestamp="2026-01-21 17:52:00 +0000 UTC" firstStartedPulling="2026-01-21 17:52:02.760516109 +0000 UTC m=+1149.386806132" lastFinishedPulling="2026-01-21 17:53:01.756184587 +0000 UTC m=+1208.382474610" observedRunningTime="2026-01-21 17:53:04.503203541 +0000 UTC m=+1211.129493574" watchObservedRunningTime="2026-01-21 17:53:04.518031496 +0000 UTC m=+1211.144321519" Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.540435 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"366d7f87-d059-4950-9c11-3c4cd4d64a58","Type":"ContainerStarted","Data":"1755b7084e12783ad03d07528e38c4ebb686be64f2bd843b82f6ad6af907083d"} Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.583795 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-cfcccc69b-6zwk4" event={"ID":"f4c9e3bf-79dd-49d5-af90-db5a6087f0f3","Type":"ContainerStarted","Data":"ff6834dd7bd8214931b328a501340588a0f5eb426b9b9709b03b95249ab60d75"} Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.585065 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.585100 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.634282 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn" event={"ID":"9bb30a38-ea0d-4580-9a41-326f00b5c149","Type":"ContainerStarted","Data":"bb4279c09ae68a178ad5743f821b303487a9f2d2708a3cd485c96b62aab2e97f"} Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.647767 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"40368171-ea6d-4ab3-a1de-33204529aab4","Type":"ContainerStarted","Data":"d2194fc44a5b60c7c138f69f22e9ddb786f744465102dfb193e1bebefa3cbe72"} Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.647840 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"40368171-ea6d-4ab3-a1de-33204529aab4","Type":"ContainerStarted","Data":"fe4117261b0a83bdc9f52233415e7679de850adfe1f5ce23fd4909218ee1ecf8"} Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.653754 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5" event={"ID":"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876","Type":"ContainerStarted","Data":"be5f8bf7ca05c05eda2040f5f939e24576b2fb0173e76a56f8758f77bd57c049"} Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.653815 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.652272 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-cfcccc69b-6zwk4" podStartSLOduration=16.652241279 podStartE2EDuration="16.652241279s" podCreationTimestamp="2026-01-21 17:52:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:53:04.622825367 +0000 UTC m=+1211.249115410" watchObservedRunningTime="2026-01-21 17:53:04.652241279 +0000 UTC m=+1211.278531302" Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.766250 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6b9d59f6f8-vl6sn" podStartSLOduration=6.416444154 podStartE2EDuration="20.766221467s" podCreationTimestamp="2026-01-21 17:52:44 +0000 UTC" firstStartedPulling="2026-01-21 17:52:46.299841894 +0000 UTC m=+1192.926131917" lastFinishedPulling="2026-01-21 17:53:00.649619207 +0000 UTC m=+1207.275909230" observedRunningTime="2026-01-21 17:53:04.663653949 +0000 UTC m=+1211.289943982" watchObservedRunningTime="2026-01-21 17:53:04.766221467 +0000 UTC m=+1211.392511490" Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.767213 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.767399 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-applier-0" Jan 21 17:53:04 crc kubenswrapper[4799]: I0121 17:53:04.768020 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Jan 21 17:53:05 crc kubenswrapper[4799]: I0121 17:53:05.560080 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7c9474f76d-ptsv9" Jan 21 17:53:05 crc kubenswrapper[4799]: I0121 17:53:05.724697 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7f979ff5f7-qvdts" event={"ID":"c7dc5147-addd-46d9-b5b3-3f328c0a5a94","Type":"ContainerStarted","Data":"ab45e388dd7e3d8662526785371e257d02d334ba38950296a3972f72f7d1ea01"} Jan 21 17:53:05 crc kubenswrapper[4799]: I0121 17:53:05.769027 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-7f979ff5f7-qvdts" podStartSLOduration=7.773775358 podStartE2EDuration="21.769001575s" podCreationTimestamp="2026-01-21 17:52:44 +0000 UTC" firstStartedPulling="2026-01-21 17:52:46.652506117 +0000 UTC m=+1193.278796140" lastFinishedPulling="2026-01-21 17:53:00.647732334 +0000 UTC m=+1207.274022357" observedRunningTime="2026-01-21 17:53:05.757948246 +0000 UTC m=+1212.384238279" watchObservedRunningTime="2026-01-21 17:53:05.769001575 +0000 UTC m=+1212.395291588" Jan 21 17:53:05 crc kubenswrapper[4799]: I0121 17:53:05.783576 4799 generic.go:334] "Generic (PLEG): container finished" podID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerID="b03bdbe200981c553eeefe9cf4d3dc8468454e2e651c397a5bb471b84255b29f" exitCode=0 Jan 21 17:53:05 crc kubenswrapper[4799]: I0121 17:53:05.783613 4799 
generic.go:334] "Generic (PLEG): container finished" podID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerID="5db2fe2be2fee23676dafc35c5104e8f1d5377f39bc33ba233590f44fc108599" exitCode=2 Jan 21 17:53:05 crc kubenswrapper[4799]: I0121 17:53:05.783622 4799 generic.go:334] "Generic (PLEG): container finished" podID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerID="ab1356f09185e5bcd03fc2bf91986077d64da5b25ade94ba4110f03988ec963f" exitCode=0 Jan 21 17:53:05 crc kubenswrapper[4799]: I0121 17:53:05.783664 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4b8ffb2a-2019-41d5-a0fb-c05199bcc230","Type":"ContainerDied","Data":"b03bdbe200981c553eeefe9cf4d3dc8468454e2e651c397a5bb471b84255b29f"} Jan 21 17:53:05 crc kubenswrapper[4799]: I0121 17:53:05.783694 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4b8ffb2a-2019-41d5-a0fb-c05199bcc230","Type":"ContainerDied","Data":"5db2fe2be2fee23676dafc35c5104e8f1d5377f39bc33ba233590f44fc108599"} Jan 21 17:53:05 crc kubenswrapper[4799]: I0121 17:53:05.783704 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4b8ffb2a-2019-41d5-a0fb-c05199bcc230","Type":"ContainerDied","Data":"ab1356f09185e5bcd03fc2bf91986077d64da5b25ade94ba4110f03988ec963f"} Jan 21 17:53:05 crc kubenswrapper[4799]: I0121 17:53:05.788375 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"744eebc5-bc8e-4aaa-9165-35498dadc94a","Type":"ContainerStarted","Data":"fa94f6751f355a4267199fb8670a480539dfe896d65aa307d00f129d20f75045"} Jan 21 17:53:05 crc kubenswrapper[4799]: I0121 17:53:05.848087 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"40368171-ea6d-4ab3-a1de-33204529aab4","Type":"ContainerStarted","Data":"bd6c3d6ab130cb4c90f374437b50d407fa4851768fd88fb0de7d7ccac4d6c481"} Jan 21 17:53:05 crc kubenswrapper[4799]: I0121 17:53:05.848707 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Jan 21 17:53:05 crc kubenswrapper[4799]: I0121 17:53:05.876542 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=4.876519523 podStartE2EDuration="4.876519523s" podCreationTimestamp="2026-01-21 17:53:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:53:05.872062188 +0000 UTC m=+1212.498352231" watchObservedRunningTime="2026-01-21 17:53:05.876519523 +0000 UTC m=+1212.502809556" Jan 21 17:53:05 crc kubenswrapper[4799]: I0121 17:53:05.880529 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"64d79edc-abb7-410f-b687-f102bfc189d1","Type":"ContainerStarted","Data":"74b5e632569e8467d503f2ea15638295febc26d41c042a2ce88f38715d9147c1"} Jan 21 17:53:05 crc kubenswrapper[4799]: I0121 17:53:05.894593 4799 generic.go:334] "Generic (PLEG): container finished" podID="ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876" containerID="af370e696c7ec23904c76cb908a8553543f4b976b832196b41e2d1c4736ff2e4" exitCode=0 Jan 21 17:53:05 crc kubenswrapper[4799]: I0121 17:53:05.894727 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5" event={"ID":"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876","Type":"ContainerDied","Data":"af370e696c7ec23904c76cb908a8553543f4b976b832196b41e2d1c4736ff2e4"} Jan 21 17:53:05 
crc kubenswrapper[4799]: I0121 17:53:05.900247 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8c8a46b8-4c1b-413d-a085-fa3994505174","Type":"ContainerStarted","Data":"fa639c39028cdb1d9d2f72559b9e14ddfb7e654459cb9f2c3b2ed6a43aabcf21"} Jan 21 17:53:06 crc kubenswrapper[4799]: I0121 17:53:06.082719 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 21 17:53:06 crc kubenswrapper[4799]: I0121 17:53:06.156907 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 21 17:53:06 crc kubenswrapper[4799]: I0121 17:53:06.922986 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"64d79edc-abb7-410f-b687-f102bfc189d1","Type":"ContainerStarted","Data":"f25222919a2456e7386f4791b3094a4c3cba6766ca45a09c6b86f8ea814b4ff1"} Jan 21 17:53:06 crc kubenswrapper[4799]: I0121 17:53:06.939334 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5" event={"ID":"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876","Type":"ContainerStarted","Data":"a6ec660917db9b44d4f79ddd35bb42d9da4733377e7e8c758088010a22144c80"} Jan 21 17:53:06 crc kubenswrapper[4799]: I0121 17:53:06.939480 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5" Jan 21 17:53:06 crc kubenswrapper[4799]: I0121 17:53:06.942639 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8c8a46b8-4c1b-413d-a085-fa3994505174","Type":"ContainerStarted","Data":"73e5793caa1aa068f8556db9aa3ace8701be39f6ff4982bfa1050b63e77dcedc"} Jan 21 17:53:06 crc kubenswrapper[4799]: I0121 17:53:06.946684 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"366d7f87-d059-4950-9c11-3c4cd4d64a58","Type":"ContainerStarted","Data":"719539cd7efab3bbc47ee08173e8b2c604357bbecdc273906d1aa8c66631bf08"} Jan 21 17:53:06 crc kubenswrapper[4799]: I0121 17:53:06.969081 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5" podStartSLOduration=5.969060141 podStartE2EDuration="5.969060141s" podCreationTimestamp="2026-01-21 17:53:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:53:06.962898149 +0000 UTC m=+1213.589188192" watchObservedRunningTime="2026-01-21 17:53:06.969060141 +0000 UTC m=+1213.595350164" Jan 21 17:53:07 crc kubenswrapper[4799]: I0121 17:53:07.312504 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Jan 21 17:53:07 crc kubenswrapper[4799]: I0121 17:53:07.904848 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 21 17:53:07 crc kubenswrapper[4799]: I0121 17:53:07.954498 4799 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 21 17:53:07 crc kubenswrapper[4799]: I0121 17:53:07.981923 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7644966657-gcssj" Jan 21 17:53:08 crc kubenswrapper[4799]: I0121 17:53:08.060026 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7c9474f76d-ptsv9"] Jan 21 17:53:08 crc kubenswrapper[4799]: I0121 17:53:08.060352 4799 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/neutron-7c9474f76d-ptsv9" podUID="57391f37-88fc-4dca-9afd-159d78c47ca1" containerName="neutron-api" containerID="cri-o://07d914c9327fc6d601567f76ce1987ea59b0494b1c050f06533a99a64c744755" gracePeriod=30 Jan 21 17:53:08 crc kubenswrapper[4799]: I0121 17:53:08.060414 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7c9474f76d-ptsv9" podUID="57391f37-88fc-4dca-9afd-159d78c47ca1" containerName="neutron-httpd" containerID="cri-o://e200b63c65b19acd0df9eee80040ffdddf00f005ff88c6d89d24eff56f448dbc" gracePeriod=30 Jan 21 17:53:08 crc kubenswrapper[4799]: I0121 17:53:08.968681 4799 generic.go:334] "Generic (PLEG): container finished" podID="57391f37-88fc-4dca-9afd-159d78c47ca1" containerID="e200b63c65b19acd0df9eee80040ffdddf00f005ff88c6d89d24eff56f448dbc" exitCode=0 Jan 21 17:53:08 crc kubenswrapper[4799]: I0121 17:53:08.968766 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c9474f76d-ptsv9" event={"ID":"57391f37-88fc-4dca-9afd-159d78c47ca1","Type":"ContainerDied","Data":"e200b63c65b19acd0df9eee80040ffdddf00f005ff88c6d89d24eff56f448dbc"} Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.000969 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"744eebc5-bc8e-4aaa-9165-35498dadc94a","Type":"ContainerStarted","Data":"cb91333aefe48ff642fb30e865171b3cb1000c1a25db0b45c4d5ac75c209c400"} Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.018401 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"366d7f87-d059-4950-9c11-3c4cd4d64a58","Type":"ContainerStarted","Data":"f8b5f882b9446b1699792a19004f0af42dca2a6fe86dd901c5e8883c45c8b41d"} Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.021240 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="366d7f87-d059-4950-9c11-3c4cd4d64a58" containerName="cinder-api-log" containerID="cri-o://719539cd7efab3bbc47ee08173e8b2c604357bbecdc273906d1aa8c66631bf08" gracePeriod=30 Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.021395 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="366d7f87-d059-4950-9c11-3c4cd4d64a58" containerName="cinder-api" containerID="cri-o://f8b5f882b9446b1699792a19004f0af42dca2a6fe86dd901c5e8883c45c8b41d" gracePeriod=30 Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.046807 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"64d79edc-abb7-410f-b687-f102bfc189d1","Type":"ContainerStarted","Data":"49aba02777a3f86b24bad8928534af44cc4e251edf34bb29b165d3b6d9f00d6a"} Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.047025 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="64d79edc-abb7-410f-b687-f102bfc189d1" containerName="glance-log" containerID="cri-o://f25222919a2456e7386f4791b3094a4c3cba6766ca45a09c6b86f8ea814b4ff1" gracePeriod=30 Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.047173 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="64d79edc-abb7-410f-b687-f102bfc189d1" containerName="glance-httpd" containerID="cri-o://49aba02777a3f86b24bad8928534af44cc4e251edf34bb29b165d3b6d9f00d6a" gracePeriod=30 Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.065755 4799 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=8.448553792 podStartE2EDuration="9.065726075s" podCreationTimestamp="2026-01-21 17:53:01 +0000 UTC" firstStartedPulling="2026-01-21 17:53:02.981818978 +0000 UTC m=+1209.608109001" lastFinishedPulling="2026-01-21 17:53:03.598991261 +0000 UTC m=+1210.225281284" observedRunningTime="2026-01-21 17:53:10.05375948 +0000 UTC m=+1216.680049513" watchObservedRunningTime="2026-01-21 17:53:10.065726075 +0000 UTC m=+1216.692016098" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.075522 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8c8a46b8-4c1b-413d-a085-fa3994505174","Type":"ContainerStarted","Data":"04875df20be22c723dbbe658ddf0114d0e3681f8f13589b8e050eca8923a0dfa"} Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.075784 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8c8a46b8-4c1b-413d-a085-fa3994505174" containerName="glance-log" containerID="cri-o://73e5793caa1aa068f8556db9aa3ace8701be39f6ff4982bfa1050b63e77dcedc" gracePeriod=30 Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.076196 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8c8a46b8-4c1b-413d-a085-fa3994505174" containerName="glance-httpd" containerID="cri-o://04875df20be22c723dbbe658ddf0114d0e3681f8f13589b8e050eca8923a0dfa" gracePeriod=30 Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.094825 4799 generic.go:334] "Generic (PLEG): container finished" podID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerID="8f23bc960d06b776a628754f163280218abe7c353c40a867d70e231f9308d1ec" exitCode=0 Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.094899 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4b8ffb2a-2019-41d5-a0fb-c05199bcc230","Type":"ContainerDied","Data":"8f23bc960d06b776a628754f163280218abe7c353c40a867d70e231f9308d1ec"} Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.094934 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4b8ffb2a-2019-41d5-a0fb-c05199bcc230","Type":"ContainerDied","Data":"e1af4383698524337c3547b707a85d1d51d1d0a641eaff9cfdb9a909ae2490d9"} Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.094947 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1af4383698524337c3547b707a85d1d51d1d0a641eaff9cfdb9a909ae2490d9" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.102081 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=9.102062491 podStartE2EDuration="9.102062491s" podCreationTimestamp="2026-01-21 17:53:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:53:10.094613093 +0000 UTC m=+1216.720903126" watchObservedRunningTime="2026-01-21 17:53:10.102062491 +0000 UTC m=+1216.728352514" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.117224 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7d9c7df8bb-b2r9b" podUID="fde84d23-f64f-4299-af94-1d29894acdc0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.158:8443/dashboard/auth/login/?next=/dashboard/\": 
dial tcp 10.217.0.158:8443: connect: connection refused" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.127245 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=9.127225485 podStartE2EDuration="9.127225485s" podCreationTimestamp="2026-01-21 17:53:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:53:10.121868995 +0000 UTC m=+1216.748159018" watchObservedRunningTime="2026-01-21 17:53:10.127225485 +0000 UTC m=+1216.753515508" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.175345 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=9.17532346 podStartE2EDuration="9.17532346s" podCreationTimestamp="2026-01-21 17:53:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:53:10.160857536 +0000 UTC m=+1216.787147559" watchObservedRunningTime="2026-01-21 17:53:10.17532346 +0000 UTC m=+1216.801613483" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.287873 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.330761 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-log-httpd\") pod \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.330839 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-run-httpd\") pod \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.330891 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-sg-core-conf-yaml\") pod \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.330935 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-config-data\") pod \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.330959 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-combined-ca-bundle\") pod \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.331113 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnmxt\" (UniqueName: \"kubernetes.io/projected/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-kube-api-access-nnmxt\") pod \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.331361 4799 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-scripts\") pod \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\" (UID: \"4b8ffb2a-2019-41d5-a0fb-c05199bcc230\") " Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.339656 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-scripts" (OuterVolumeSpecName: "scripts") pod "4b8ffb2a-2019-41d5-a0fb-c05199bcc230" (UID: "4b8ffb2a-2019-41d5-a0fb-c05199bcc230"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.342881 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4b8ffb2a-2019-41d5-a0fb-c05199bcc230" (UID: "4b8ffb2a-2019-41d5-a0fb-c05199bcc230"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.343333 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4b8ffb2a-2019-41d5-a0fb-c05199bcc230" (UID: "4b8ffb2a-2019-41d5-a0fb-c05199bcc230"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.354371 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-kube-api-access-nnmxt" (OuterVolumeSpecName: "kube-api-access-nnmxt") pod "4b8ffb2a-2019-41d5-a0fb-c05199bcc230" (UID: "4b8ffb2a-2019-41d5-a0fb-c05199bcc230"). InnerVolumeSpecName "kube-api-access-nnmxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.428151 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4b8ffb2a-2019-41d5-a0fb-c05199bcc230" (UID: "4b8ffb2a-2019-41d5-a0fb-c05199bcc230"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.454215 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnmxt\" (UniqueName: \"kubernetes.io/projected/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-kube-api-access-nnmxt\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.454244 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.454255 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.454264 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.454272 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.460380 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.507861 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-config-data" (OuterVolumeSpecName: "config-data") pod "4b8ffb2a-2019-41d5-a0fb-c05199bcc230" (UID: "4b8ffb2a-2019-41d5-a0fb-c05199bcc230"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.524249 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4b8ffb2a-2019-41d5-a0fb-c05199bcc230" (UID: "4b8ffb2a-2019-41d5-a0fb-c05199bcc230"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.558732 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:10 crc kubenswrapper[4799]: I0121 17:53:10.558992 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b8ffb2a-2019-41d5-a0fb-c05199bcc230-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.129353 4799 generic.go:334] "Generic (PLEG): container finished" podID="8c8a46b8-4c1b-413d-a085-fa3994505174" containerID="04875df20be22c723dbbe658ddf0114d0e3681f8f13589b8e050eca8923a0dfa" exitCode=0 Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.129395 4799 generic.go:334] "Generic (PLEG): container finished" podID="8c8a46b8-4c1b-413d-a085-fa3994505174" containerID="73e5793caa1aa068f8556db9aa3ace8701be39f6ff4982bfa1050b63e77dcedc" exitCode=143 Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.129501 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8c8a46b8-4c1b-413d-a085-fa3994505174","Type":"ContainerDied","Data":"04875df20be22c723dbbe658ddf0114d0e3681f8f13589b8e050eca8923a0dfa"} Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.129536 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8c8a46b8-4c1b-413d-a085-fa3994505174","Type":"ContainerDied","Data":"73e5793caa1aa068f8556db9aa3ace8701be39f6ff4982bfa1050b63e77dcedc"} Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.134275 4799 generic.go:334] "Generic (PLEG): container finished" podID="366d7f87-d059-4950-9c11-3c4cd4d64a58" containerID="f8b5f882b9446b1699792a19004f0af42dca2a6fe86dd901c5e8883c45c8b41d" exitCode=0 Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.134310 4799 generic.go:334] "Generic (PLEG): container finished" podID="366d7f87-d059-4950-9c11-3c4cd4d64a58" containerID="719539cd7efab3bbc47ee08173e8b2c604357bbecdc273906d1aa8c66631bf08" exitCode=143 Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.134399 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"366d7f87-d059-4950-9c11-3c4cd4d64a58","Type":"ContainerDied","Data":"f8b5f882b9446b1699792a19004f0af42dca2a6fe86dd901c5e8883c45c8b41d"} Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.134429 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"366d7f87-d059-4950-9c11-3c4cd4d64a58","Type":"ContainerDied","Data":"719539cd7efab3bbc47ee08173e8b2c604357bbecdc273906d1aa8c66631bf08"} Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.163584 4799 generic.go:334] "Generic (PLEG): container finished" podID="64d79edc-abb7-410f-b687-f102bfc189d1" containerID="49aba02777a3f86b24bad8928534af44cc4e251edf34bb29b165d3b6d9f00d6a" exitCode=0 Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.163869 4799 generic.go:334] "Generic (PLEG): container finished" podID="64d79edc-abb7-410f-b687-f102bfc189d1" containerID="f25222919a2456e7386f4791b3094a4c3cba6766ca45a09c6b86f8ea814b4ff1" exitCode=143 Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.164207 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"64d79edc-abb7-410f-b687-f102bfc189d1","Type":"ContainerDied","Data":"49aba02777a3f86b24bad8928534af44cc4e251edf34bb29b165d3b6d9f00d6a"} Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.164288 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"64d79edc-abb7-410f-b687-f102bfc189d1","Type":"ContainerDied","Data":"f25222919a2456e7386f4791b3094a4c3cba6766ca45a09c6b86f8ea814b4ff1"} Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.164422 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.326780 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.361299 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.418734 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.420668 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c8a46b8-4c1b-413d-a085-fa3994505174-logs\") pod \"8c8a46b8-4c1b-413d-a085-fa3994505174\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.420745 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-combined-ca-bundle\") pod \"8c8a46b8-4c1b-413d-a085-fa3994505174\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.420792 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44cw2\" (UniqueName: \"kubernetes.io/projected/8c8a46b8-4c1b-413d-a085-fa3994505174-kube-api-access-44cw2\") pod \"8c8a46b8-4c1b-413d-a085-fa3994505174\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.420842 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8c8a46b8-4c1b-413d-a085-fa3994505174-httpd-run\") pod \"8c8a46b8-4c1b-413d-a085-fa3994505174\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.420881 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-scripts\") pod \"8c8a46b8-4c1b-413d-a085-fa3994505174\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.420911 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-config-data\") pod \"8c8a46b8-4c1b-413d-a085-fa3994505174\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.420937 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"8c8a46b8-4c1b-413d-a085-fa3994505174\" (UID: \"8c8a46b8-4c1b-413d-a085-fa3994505174\") " Jan 21 17:53:11 crc 
kubenswrapper[4799]: I0121 17:53:11.422421 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c8a46b8-4c1b-413d-a085-fa3994505174-logs" (OuterVolumeSpecName: "logs") pod "8c8a46b8-4c1b-413d-a085-fa3994505174" (UID: "8c8a46b8-4c1b-413d-a085-fa3994505174"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.433637 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "8c8a46b8-4c1b-413d-a085-fa3994505174" (UID: "8c8a46b8-4c1b-413d-a085-fa3994505174"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.441725 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c8a46b8-4c1b-413d-a085-fa3994505174-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8c8a46b8-4c1b-413d-a085-fa3994505174" (UID: "8c8a46b8-4c1b-413d-a085-fa3994505174"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.465199 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:11 crc kubenswrapper[4799]: E0121 17:53:11.465692 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerName="proxy-httpd" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.465711 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerName="proxy-httpd" Jan 21 17:53:11 crc kubenswrapper[4799]: E0121 17:53:11.465733 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerName="ceilometer-notification-agent" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.465740 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerName="ceilometer-notification-agent" Jan 21 17:53:11 crc kubenswrapper[4799]: E0121 17:53:11.465819 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerName="ceilometer-central-agent" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.465828 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerName="ceilometer-central-agent" Jan 21 17:53:11 crc kubenswrapper[4799]: E0121 17:53:11.465837 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c8a46b8-4c1b-413d-a085-fa3994505174" containerName="glance-log" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.465843 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c8a46b8-4c1b-413d-a085-fa3994505174" containerName="glance-log" Jan 21 17:53:11 crc kubenswrapper[4799]: E0121 17:53:11.465853 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c8a46b8-4c1b-413d-a085-fa3994505174" containerName="glance-httpd" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.465860 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c8a46b8-4c1b-413d-a085-fa3994505174" containerName="glance-httpd" Jan 21 17:53:11 crc kubenswrapper[4799]: E0121 17:53:11.465874 4799 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerName="sg-core" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.465879 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerName="sg-core" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.466080 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c8a46b8-4c1b-413d-a085-fa3994505174" containerName="glance-log" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.466096 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerName="ceilometer-notification-agent" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.466107 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerName="proxy-httpd" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.466117 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerName="sg-core" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.466160 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c8a46b8-4c1b-413d-a085-fa3994505174" containerName="glance-httpd" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.466169 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" containerName="ceilometer-central-agent" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.476865 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-scripts" (OuterVolumeSpecName: "scripts") pod "8c8a46b8-4c1b-413d-a085-fa3994505174" (UID: "8c8a46b8-4c1b-413d-a085-fa3994505174"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.482455 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c8a46b8-4c1b-413d-a085-fa3994505174-kube-api-access-44cw2" (OuterVolumeSpecName: "kube-api-access-44cw2") pod "8c8a46b8-4c1b-413d-a085-fa3994505174" (UID: "8c8a46b8-4c1b-413d-a085-fa3994505174"). InnerVolumeSpecName "kube-api-access-44cw2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.482666 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c8a46b8-4c1b-413d-a085-fa3994505174" (UID: "8c8a46b8-4c1b-413d-a085-fa3994505174"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.485400 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.489592 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.499731 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.529460 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.529568 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-scripts\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.529638 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-config-data\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.529663 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f460180e-2550-4286-ae68-85d752d3a3a3-log-httpd\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.529696 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.529737 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xq66g\" (UniqueName: \"kubernetes.io/projected/f460180e-2550-4286-ae68-85d752d3a3a3-kube-api-access-xq66g\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.529752 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f460180e-2550-4286-ae68-85d752d3a3a3-run-httpd\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.529829 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.529857 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 
17:53:11.529874 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c8a46b8-4c1b-413d-a085-fa3994505174-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.529884 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.529898 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44cw2\" (UniqueName: \"kubernetes.io/projected/8c8a46b8-4c1b-413d-a085-fa3994505174-kube-api-access-44cw2\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.529907 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8c8a46b8-4c1b-413d-a085-fa3994505174-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.547573 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.621775 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-config-data" (OuterVolumeSpecName: "config-data") pod "8c8a46b8-4c1b-413d-a085-fa3994505174" (UID: "8c8a46b8-4c1b-413d-a085-fa3994505174"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.623242 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.623547 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.634927 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xq66g\" (UniqueName: \"kubernetes.io/projected/f460180e-2550-4286-ae68-85d752d3a3a3-kube-api-access-xq66g\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.634982 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f460180e-2550-4286-ae68-85d752d3a3a3-run-httpd\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.635109 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.635208 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-scripts\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.635276 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-config-data\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.635298 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f460180e-2550-4286-ae68-85d752d3a3a3-log-httpd\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.635340 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.635860 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c8a46b8-4c1b-413d-a085-fa3994505174-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.637388 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.639074 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-scripts\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.639107 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.639258 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f460180e-2550-4286-ae68-85d752d3a3a3-log-httpd\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.641998 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.643405 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-config-data\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.646094 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f460180e-2550-4286-ae68-85d752d3a3a3-run-httpd\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.656255 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.662641 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xq66g\" (UniqueName: \"kubernetes.io/projected/f460180e-2550-4286-ae68-85d752d3a3a3-kube-api-access-xq66g\") pod \"ceilometer-0\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.739373 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"64d79edc-abb7-410f-b687-f102bfc189d1\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.739531 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-config-data\") pod \"64d79edc-abb7-410f-b687-f102bfc189d1\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.739575 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcfd4\" (UniqueName: \"kubernetes.io/projected/64d79edc-abb7-410f-b687-f102bfc189d1-kube-api-access-xcfd4\") pod \"64d79edc-abb7-410f-b687-f102bfc189d1\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.739717 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-combined-ca-bundle\") pod \"64d79edc-abb7-410f-b687-f102bfc189d1\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " Jan 21 17:53:11 crc 
kubenswrapper[4799]: I0121 17:53:11.739747 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/64d79edc-abb7-410f-b687-f102bfc189d1-httpd-run\") pod \"64d79edc-abb7-410f-b687-f102bfc189d1\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.739897 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64d79edc-abb7-410f-b687-f102bfc189d1-logs\") pod \"64d79edc-abb7-410f-b687-f102bfc189d1\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.739951 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-combined-ca-bundle\") pod \"366d7f87-d059-4950-9c11-3c4cd4d64a58\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.739975 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-scripts\") pod \"64d79edc-abb7-410f-b687-f102bfc189d1\" (UID: \"64d79edc-abb7-410f-b687-f102bfc189d1\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.740592 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64d79edc-abb7-410f-b687-f102bfc189d1-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "64d79edc-abb7-410f-b687-f102bfc189d1" (UID: "64d79edc-abb7-410f-b687-f102bfc189d1"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.741074 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64d79edc-abb7-410f-b687-f102bfc189d1-logs" (OuterVolumeSpecName: "logs") pod "64d79edc-abb7-410f-b687-f102bfc189d1" (UID: "64d79edc-abb7-410f-b687-f102bfc189d1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.744458 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64d79edc-abb7-410f-b687-f102bfc189d1-kube-api-access-xcfd4" (OuterVolumeSpecName: "kube-api-access-xcfd4") pod "64d79edc-abb7-410f-b687-f102bfc189d1" (UID: "64d79edc-abb7-410f-b687-f102bfc189d1"). InnerVolumeSpecName "kube-api-access-xcfd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.749400 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-scripts" (OuterVolumeSpecName: "scripts") pod "64d79edc-abb7-410f-b687-f102bfc189d1" (UID: "64d79edc-abb7-410f-b687-f102bfc189d1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.755762 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "64d79edc-abb7-410f-b687-f102bfc189d1" (UID: "64d79edc-abb7-410f-b687-f102bfc189d1"). InnerVolumeSpecName "local-storage04-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.761389 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.775231 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "366d7f87-d059-4950-9c11-3c4cd4d64a58" (UID: "366d7f87-d059-4950-9c11-3c4cd4d64a58"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.784577 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64d79edc-abb7-410f-b687-f102bfc189d1" (UID: "64d79edc-abb7-410f-b687-f102bfc189d1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.836071 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-config-data" (OuterVolumeSpecName: "config-data") pod "64d79edc-abb7-410f-b687-f102bfc189d1" (UID: "64d79edc-abb7-410f-b687-f102bfc189d1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.842420 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-config-data-custom\") pod \"366d7f87-d059-4950-9c11-3c4cd4d64a58\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.842551 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/366d7f87-d059-4950-9c11-3c4cd4d64a58-etc-machine-id\") pod \"366d7f87-d059-4950-9c11-3c4cd4d64a58\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.842653 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/366d7f87-d059-4950-9c11-3c4cd4d64a58-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "366d7f87-d059-4950-9c11-3c4cd4d64a58" (UID: "366d7f87-d059-4950-9c11-3c4cd4d64a58"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.842708 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-config-data\") pod \"366d7f87-d059-4950-9c11-3c4cd4d64a58\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.842764 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnzpl\" (UniqueName: \"kubernetes.io/projected/366d7f87-d059-4950-9c11-3c4cd4d64a58-kube-api-access-mnzpl\") pod \"366d7f87-d059-4950-9c11-3c4cd4d64a58\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.842793 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-scripts\") pod \"366d7f87-d059-4950-9c11-3c4cd4d64a58\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.842871 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/366d7f87-d059-4950-9c11-3c4cd4d64a58-logs\") pod \"366d7f87-d059-4950-9c11-3c4cd4d64a58\" (UID: \"366d7f87-d059-4950-9c11-3c4cd4d64a58\") " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.843527 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/366d7f87-d059-4950-9c11-3c4cd4d64a58-logs" (OuterVolumeSpecName: "logs") pod "366d7f87-d059-4950-9c11-3c4cd4d64a58" (UID: "366d7f87-d059-4950-9c11-3c4cd4d64a58"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.844828 4799 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/366d7f87-d059-4950-9c11-3c4cd4d64a58-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.844868 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.844879 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/64d79edc-abb7-410f-b687-f102bfc189d1-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.844892 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64d79edc-abb7-410f-b687-f102bfc189d1-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.844926 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.844934 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.844960 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.844969 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64d79edc-abb7-410f-b687-f102bfc189d1-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.844978 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcfd4\" (UniqueName: \"kubernetes.io/projected/64d79edc-abb7-410f-b687-f102bfc189d1-kube-api-access-xcfd4\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.844989 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/366d7f87-d059-4950-9c11-3c4cd4d64a58-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.851307 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/366d7f87-d059-4950-9c11-3c4cd4d64a58-kube-api-access-mnzpl" (OuterVolumeSpecName: "kube-api-access-mnzpl") pod "366d7f87-d059-4950-9c11-3c4cd4d64a58" (UID: "366d7f87-d059-4950-9c11-3c4cd4d64a58"). InnerVolumeSpecName "kube-api-access-mnzpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.853286 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-scripts" (OuterVolumeSpecName: "scripts") pod "366d7f87-d059-4950-9c11-3c4cd4d64a58" (UID: "366d7f87-d059-4950-9c11-3c4cd4d64a58"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.855291 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "366d7f87-d059-4950-9c11-3c4cd4d64a58" (UID: "366d7f87-d059-4950-9c11-3c4cd4d64a58"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.871044 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.880564 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.913844 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-config-data" (OuterVolumeSpecName: "config-data") pod "366d7f87-d059-4950-9c11-3c4cd4d64a58" (UID: "366d7f87-d059-4950-9c11-3c4cd4d64a58"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.914870 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.971509 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.971546 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.971564 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.971575 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnzpl\" (UniqueName: \"kubernetes.io/projected/366d7f87-d059-4950-9c11-3c4cd4d64a58-kube-api-access-mnzpl\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:11 crc kubenswrapper[4799]: I0121 17:53:11.971598 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/366d7f87-d059-4950-9c11-3c4cd4d64a58-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.036269 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.202565 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8c8a46b8-4c1b-413d-a085-fa3994505174","Type":"ContainerDied","Data":"fa639c39028cdb1d9d2f72559b9e14ddfb7e654459cb9f2c3b2ed6a43aabcf21"} Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.202627 4799 scope.go:117] "RemoveContainer" containerID="04875df20be22c723dbbe658ddf0114d0e3681f8f13589b8e050eca8923a0dfa" Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.202810 4799 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.208706 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.215608 4799 generic.go:334] "Generic (PLEG): container finished" podID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerID="feb17023cb77e67cafee7aaedbb3379b33fc70a5c6313f02972807ffc7e15f7f" exitCode=1 Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.217345 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b8ffb2a-2019-41d5-a0fb-c05199bcc230" path="/var/lib/kubelet/pods/4b8ffb2a-2019-41d5-a0fb-c05199bcc230/volumes" Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.219823 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-cfcccc69b-6zwk4" Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.219855 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"366d7f87-d059-4950-9c11-3c4cd4d64a58","Type":"ContainerDied","Data":"1755b7084e12783ad03d07528e38c4ebb686be64f2bd843b82f6ad6af907083d"} Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.219877 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"e3555046-24d9-4700-bdb8-0a09c35f651a","Type":"ContainerDied","Data":"feb17023cb77e67cafee7aaedbb3379b33fc70a5c6313f02972807ffc7e15f7f"} Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.221311 4799 scope.go:117] "RemoveContainer" containerID="feb17023cb77e67cafee7aaedbb3379b33fc70a5c6313f02972807ffc7e15f7f" Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.225637 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"64d79edc-abb7-410f-b687-f102bfc189d1","Type":"ContainerDied","Data":"74b5e632569e8467d503f2ea15638295febc26d41c042a2ce88f38715d9147c1"} Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.225909 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.247153 4799 scope.go:117] "RemoveContainer" containerID="73e5793caa1aa068f8556db9aa3ace8701be39f6ff4982bfa1050b63e77dcedc" Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.315723 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.319206 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5" Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.339993 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5dcb766fd4-g29lg"] Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.340300 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5dcb766fd4-g29lg" podUID="aa02fc34-2263-47cb-90cf-7baedb10be5e" containerName="barbican-api-log" containerID="cri-o://04e72e40123c0bbcb35c145b03d0dd2cf941c3ae38d27d6d61ca9e3df78f2e21" gracePeriod=30 Jan 21 17:53:12 crc kubenswrapper[4799]: I0121 17:53:12.340466 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5dcb766fd4-g29lg" podUID="aa02fc34-2263-47cb-90cf-7baedb10be5e" containerName="barbican-api" containerID="cri-o://61f209ff39eb7284a64f9fd5b461c8636003e84acec3f15340dbde5ba06129b5" gracePeriod=30 Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:12.381266 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:12.438664 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:12.488419 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9d5d45775-xwjff"] Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:12.488687 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-9d5d45775-xwjff" podUID="af0f2741-8c09-4555-b8c4-251b9a2de57e" containerName="dnsmasq-dns" containerID="cri-o://23932414f560b8204d12801ee82b40fe3fbc4348408ab863b26c4af0fd32d52a" gracePeriod=10 Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:12.553335 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.277590 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f460180e-2550-4286-ae68-85d752d3a3a3","Type":"ContainerStarted","Data":"066e87115eb4924cc383405fe3c3c1094f8563f762e8352616ac581652941daa"} Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.367723 4799 scope.go:117] "RemoveContainer" containerID="f8b5f882b9446b1699792a19004f0af42dca2a6fe86dd901c5e8883c45c8b41d" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.405145 4799 generic.go:334] "Generic (PLEG): container finished" podID="af0f2741-8c09-4555-b8c4-251b9a2de57e" containerID="23932414f560b8204d12801ee82b40fe3fbc4348408ab863b26c4af0fd32d52a" exitCode=0 Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.405261 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9d5d45775-xwjff" event={"ID":"af0f2741-8c09-4555-b8c4-251b9a2de57e","Type":"ContainerDied","Data":"23932414f560b8204d12801ee82b40fe3fbc4348408ab863b26c4af0fd32d52a"} Jan 21 17:53:13 crc 
kubenswrapper[4799]: I0121 17:53:13.405296 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9d5d45775-xwjff" event={"ID":"af0f2741-8c09-4555-b8c4-251b9a2de57e","Type":"ContainerDied","Data":"dfdc723537c25f4eb0734ddc187e1c54c08757a211fae9eeb91a82ec6144a6f8"} Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.405305 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dfdc723537c25f4eb0734ddc187e1c54c08757a211fae9eeb91a82ec6144a6f8" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.407114 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9d5d45775-xwjff" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.412346 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.442573 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.452998 4799 scope.go:117] "RemoveContainer" containerID="719539cd7efab3bbc47ee08173e8b2c604357bbecdc273906d1aa8c66631bf08" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.461406 4799 generic.go:334] "Generic (PLEG): container finished" podID="aa02fc34-2263-47cb-90cf-7baedb10be5e" containerID="04e72e40123c0bbcb35c145b03d0dd2cf941c3ae38d27d6d61ca9e3df78f2e21" exitCode=143 Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.462563 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dcb766fd4-g29lg" event={"ID":"aa02fc34-2263-47cb-90cf-7baedb10be5e","Type":"ContainerDied","Data":"04e72e40123c0bbcb35c145b03d0dd2cf941c3ae38d27d6d61ca9e3df78f2e21"} Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.499846 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 21 17:53:13 crc kubenswrapper[4799]: E0121 17:53:13.500503 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="366d7f87-d059-4950-9c11-3c4cd4d64a58" containerName="cinder-api-log" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.500587 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="366d7f87-d059-4950-9c11-3c4cd4d64a58" containerName="cinder-api-log" Jan 21 17:53:13 crc kubenswrapper[4799]: E0121 17:53:13.500607 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="366d7f87-d059-4950-9c11-3c4cd4d64a58" containerName="cinder-api" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.500615 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="366d7f87-d059-4950-9c11-3c4cd4d64a58" containerName="cinder-api" Jan 21 17:53:13 crc kubenswrapper[4799]: E0121 17:53:13.500653 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af0f2741-8c09-4555-b8c4-251b9a2de57e" containerName="init" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.500660 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="af0f2741-8c09-4555-b8c4-251b9a2de57e" containerName="init" Jan 21 17:53:13 crc kubenswrapper[4799]: E0121 17:53:13.500679 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d79edc-abb7-410f-b687-f102bfc189d1" containerName="glance-log" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.500687 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d79edc-abb7-410f-b687-f102bfc189d1" containerName="glance-log" Jan 21 17:53:13 crc kubenswrapper[4799]: E0121 17:53:13.500716 4799 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="af0f2741-8c09-4555-b8c4-251b9a2de57e" containerName="dnsmasq-dns" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.500724 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="af0f2741-8c09-4555-b8c4-251b9a2de57e" containerName="dnsmasq-dns" Jan 21 17:53:13 crc kubenswrapper[4799]: E0121 17:53:13.500737 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d79edc-abb7-410f-b687-f102bfc189d1" containerName="glance-httpd" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.500743 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d79edc-abb7-410f-b687-f102bfc189d1" containerName="glance-httpd" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.501010 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="af0f2741-8c09-4555-b8c4-251b9a2de57e" containerName="dnsmasq-dns" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.501034 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="366d7f87-d059-4950-9c11-3c4cd4d64a58" containerName="cinder-api" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.501058 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d79edc-abb7-410f-b687-f102bfc189d1" containerName="glance-httpd" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.501071 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d79edc-abb7-410f-b687-f102bfc189d1" containerName="glance-log" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.501086 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="366d7f87-d059-4950-9c11-3c4cd4d64a58" containerName="cinder-api-log" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.502504 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.517179 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.517712 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.517761 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.517723 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.525406 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.529548 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.542186 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.554173 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.570936 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.605522 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-dns-svc\") 
pod \"af0f2741-8c09-4555-b8c4-251b9a2de57e\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.605586 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-ovsdbserver-nb\") pod \"af0f2741-8c09-4555-b8c4-251b9a2de57e\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.605624 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-config\") pod \"af0f2741-8c09-4555-b8c4-251b9a2de57e\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.605656 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-dns-swift-storage-0\") pod \"af0f2741-8c09-4555-b8c4-251b9a2de57e\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.605748 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-ovsdbserver-sb\") pod \"af0f2741-8c09-4555-b8c4-251b9a2de57e\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.605796 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fc7nk\" (UniqueName: \"kubernetes.io/projected/af0f2741-8c09-4555-b8c4-251b9a2de57e-kube-api-access-fc7nk\") pod \"af0f2741-8c09-4555-b8c4-251b9a2de57e\" (UID: \"af0f2741-8c09-4555-b8c4-251b9a2de57e\") " Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.674577 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af0f2741-8c09-4555-b8c4-251b9a2de57e-kube-api-access-fc7nk" (OuterVolumeSpecName: "kube-api-access-fc7nk") pod "af0f2741-8c09-4555-b8c4-251b9a2de57e" (UID: "af0f2741-8c09-4555-b8c4-251b9a2de57e"). InnerVolumeSpecName "kube-api-access-fc7nk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.678276 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.682916 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.689674 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.690143 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.697099 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.697360 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-hhc5h" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.718172 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-scripts\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.718269 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-config-data\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.718322 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.718350 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e8bf54a9-bf9c-47d1-b391-b73c0055c935-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.718408 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.718428 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.718447 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whj8r\" (UniqueName: \"kubernetes.io/projected/e8bf54a9-bf9c-47d1-b391-b73c0055c935-kube-api-access-whj8r\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.718529 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8bf54a9-bf9c-47d1-b391-b73c0055c935-logs\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.718586 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-config-data-custom\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.718686 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fc7nk\" (UniqueName: \"kubernetes.io/projected/af0f2741-8c09-4555-b8c4-251b9a2de57e-kube-api-access-fc7nk\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.743368 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "af0f2741-8c09-4555-b8c4-251b9a2de57e" (UID: "af0f2741-8c09-4555-b8c4-251b9a2de57e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.753642 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.782389 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.785043 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.786580 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "af0f2741-8c09-4555-b8c4-251b9a2de57e" (UID: "af0f2741-8c09-4555-b8c4-251b9a2de57e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.788690 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.789762 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.804263 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.817239 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "af0f2741-8c09-4555-b8c4-251b9a2de57e" (UID: "af0f2741-8c09-4555-b8c4-251b9a2de57e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.818487 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.818519 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.818543 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/watcher-decision-engine-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821554 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e8bf54a9-bf9c-47d1-b391-b73c0055c935-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821610 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821638 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821664 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821684 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821702 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whj8r\" (UniqueName: \"kubernetes.io/projected/e8bf54a9-bf9c-47d1-b391-b73c0055c935-kube-api-access-whj8r\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821759 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8bf54a9-bf9c-47d1-b391-b73c0055c935-logs\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821777 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: 
\"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821804 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-config-data-custom\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821828 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5ff098a1-6561-4017-98c9-3014f678faad-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821845 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821863 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ff098a1-6561-4017-98c9-3014f678faad-logs\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821888 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-scripts\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821917 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjt5r\" (UniqueName: \"kubernetes.io/projected/5ff098a1-6561-4017-98c9-3014f678faad-kube-api-access-bjt5r\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821948 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821965 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-config-data\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.821990 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 
17:53:13.823215 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8bf54a9-bf9c-47d1-b391-b73c0055c935-logs\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.824206 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e8bf54a9-bf9c-47d1-b391-b73c0055c935-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.830571 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.832174 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.832403 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.832428 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.848586 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.852787 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whj8r\" (UniqueName: \"kubernetes.io/projected/e8bf54a9-bf9c-47d1-b391-b73c0055c935-kube-api-access-whj8r\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.853074 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.853115 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-config-data\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.853215 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-config" (OuterVolumeSpecName: "config") pod "af0f2741-8c09-4555-b8c4-251b9a2de57e" (UID: "af0f2741-8c09-4555-b8c4-251b9a2de57e"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.853377 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-config-data-custom\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.853651 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e8bf54a9-bf9c-47d1-b391-b73c0055c935-scripts\") pod \"cinder-api-0\" (UID: \"e8bf54a9-bf9c-47d1-b391-b73c0055c935\") " pod="openstack/cinder-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.854603 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "af0f2741-8c09-4555-b8c4-251b9a2de57e" (UID: "af0f2741-8c09-4555-b8c4-251b9a2de57e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.934241 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.944642 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21e4d907-4ebb-4839-ab96-454bd4d08954-logs\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.944973 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5ff098a1-6561-4017-98c9-3014f678faad-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.945083 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.945227 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ff098a1-6561-4017-98c9-3014f678faad-logs\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.945855 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjt5r\" (UniqueName: \"kubernetes.io/projected/5ff098a1-6561-4017-98c9-3014f678faad-kube-api-access-bjt5r\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc 
kubenswrapper[4799]: I0121 17:53:13.945996 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-scripts\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.946173 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-config-data\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.950274 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bv9q4\" (UniqueName: \"kubernetes.io/projected/21e4d907-4ebb-4839-ab96-454bd4d08954-kube-api-access-bv9q4\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.950638 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.950851 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.950984 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.951148 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21e4d907-4ebb-4839-ab96-454bd4d08954-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.951297 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.958041 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " 
pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.958240 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.958802 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.958940 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/af0f2741-8c09-4555-b8c4-251b9a2de57e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.948705 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5ff098a1-6561-4017-98c9-3014f678faad-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.949289 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ff098a1-6561-4017-98c9-3014f678faad-logs\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.959732 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.963850 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:13 crc kubenswrapper[4799]: I0121 17:53:13.970980 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.023663 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.028810 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-scripts\") pod \"glance-default-internal-api-0\" (UID: 
\"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.031327 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.032785 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjt5r\" (UniqueName: \"kubernetes.io/projected/5ff098a1-6561-4017-98c9-3014f678faad-kube-api-access-bjt5r\") pod \"glance-default-internal-api-0\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.064018 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-scripts\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.064086 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-config-data\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.064119 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bv9q4\" (UniqueName: \"kubernetes.io/projected/21e4d907-4ebb-4839-ab96-454bd4d08954-kube-api-access-bv9q4\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.064182 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.064205 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.064232 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21e4d907-4ebb-4839-ab96-454bd4d08954-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.064273 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " 
pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.064353 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21e4d907-4ebb-4839-ab96-454bd4d08954-logs\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.065023 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21e4d907-4ebb-4839-ab96-454bd4d08954-logs\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.069996 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.071757 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-scripts\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.072381 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21e4d907-4ebb-4839-ab96-454bd4d08954-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.072676 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.075305 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-config-data\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.075974 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.096053 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bv9q4\" (UniqueName: \"kubernetes.io/projected/21e4d907-4ebb-4839-ab96-454bd4d08954-kube-api-access-bv9q4\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.291894 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.323158 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="366d7f87-d059-4950-9c11-3c4cd4d64a58" path="/var/lib/kubelet/pods/366d7f87-d059-4950-9c11-3c4cd4d64a58/volumes" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.323811 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64d79edc-abb7-410f-b687-f102bfc189d1" path="/var/lib/kubelet/pods/64d79edc-abb7-410f-b687-f102bfc189d1/volumes" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.324835 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c8a46b8-4c1b-413d-a085-fa3994505174" path="/var/lib/kubelet/pods/8c8a46b8-4c1b-413d-a085-fa3994505174/volumes" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.447151 4799 scope.go:117] "RemoveContainer" containerID="49aba02777a3f86b24bad8928534af44cc4e251edf34bb29b165d3b6d9f00d6a" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.474214 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.485988 4799 scope.go:117] "RemoveContainer" containerID="f25222919a2456e7386f4791b3094a4c3cba6766ca45a09c6b86f8ea814b4ff1" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.503412 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9d5d45775-xwjff" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.504984 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="744eebc5-bc8e-4aaa-9165-35498dadc94a" containerName="cinder-scheduler" containerID="cri-o://fa94f6751f355a4267199fb8670a480539dfe896d65aa307d00f129d20f75045" gracePeriod=30 Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.505416 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="744eebc5-bc8e-4aaa-9165-35498dadc94a" containerName="probe" containerID="cri-o://cb91333aefe48ff642fb30e865171b3cb1000c1a25db0b45c4d5ac75c209c400" gracePeriod=30 Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.510983 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.523554 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.566735 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9d5d45775-xwjff"] Jan 21 17:53:14 crc kubenswrapper[4799]: I0121 17:53:14.592917 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-9d5d45775-xwjff"] Jan 21 17:53:15 crc kubenswrapper[4799]: I0121 17:53:15.069545 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:53:15 crc kubenswrapper[4799]: I0121 17:53:15.447836 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 21 17:53:15 crc kubenswrapper[4799]: W0121 17:53:15.458973 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8bf54a9_bf9c_47d1_b391_b73c0055c935.slice/crio-aec72888d68247d59bffdb94f53d8be128be2b14c1d919969b291b94d26a7ed6 WatchSource:0}: Error finding container aec72888d68247d59bffdb94f53d8be128be2b14c1d919969b291b94d26a7ed6: Status 404 returned error can't find the container with id aec72888d68247d59bffdb94f53d8be128be2b14c1d919969b291b94d26a7ed6 Jan 21 17:53:15 crc kubenswrapper[4799]: I0121 17:53:15.585074 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e8bf54a9-bf9c-47d1-b391-b73c0055c935","Type":"ContainerStarted","Data":"aec72888d68247d59bffdb94f53d8be128be2b14c1d919969b291b94d26a7ed6"} Jan 21 17:53:15 crc kubenswrapper[4799]: I0121 17:53:15.625630 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"e3555046-24d9-4700-bdb8-0a09c35f651a","Type":"ContainerStarted","Data":"d3aaffd6bf3f96c9b7d3914da4516463321849f010304db2511463d6ce079929"} Jan 21 17:53:15 crc kubenswrapper[4799]: I0121 17:53:15.791359 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f460180e-2550-4286-ae68-85d752d3a3a3","Type":"ContainerStarted","Data":"36846e56ece6d0c4b28e55e88c26357a286f618d878556c6e3c7d54caaadca87"} Jan 21 17:53:15 crc kubenswrapper[4799]: I0121 17:53:15.791621 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f460180e-2550-4286-ae68-85d752d3a3a3","Type":"ContainerStarted","Data":"3cc1bd6b58b490cb895f45e1aedf80494e8157a3d8bce95caef1f4ce0d5d933e"} Jan 21 17:53:15 crc kubenswrapper[4799]: I0121 17:53:15.827567 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 21 17:53:15 crc kubenswrapper[4799]: I0121 17:53:15.920032 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5dcb766fd4-g29lg" podUID="aa02fc34-2263-47cb-90cf-7baedb10be5e" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.172:9311/healthcheck\": read tcp 10.217.0.2:34420->10.217.0.172:9311: read: connection reset by peer" Jan 21 17:53:15 crc kubenswrapper[4799]: I0121 17:53:15.920648 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5dcb766fd4-g29lg" podUID="aa02fc34-2263-47cb-90cf-7baedb10be5e" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.172:9311/healthcheck\": read tcp 10.217.0.2:34412->10.217.0.172:9311: read: connection reset by peer" Jan 21 17:53:15 crc kubenswrapper[4799]: I0121 17:53:15.987695 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/glance-default-external-api-0"] Jan 21 17:53:16 crc kubenswrapper[4799]: I0121 17:53:16.233107 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af0f2741-8c09-4555-b8c4-251b9a2de57e" path="/var/lib/kubelet/pods/af0f2741-8c09-4555-b8c4-251b9a2de57e/volumes" Jan 21 17:53:16 crc kubenswrapper[4799]: I0121 17:53:16.723972 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-55f8df6d54-cffcw" Jan 21 17:53:16 crc kubenswrapper[4799]: I0121 17:53:16.896899 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5ff098a1-6561-4017-98c9-3014f678faad","Type":"ContainerStarted","Data":"6cc85f0db739df8a87ec3fa0d3b20326292782eb902daada63ea1250ad30b890"} Jan 21 17:53:16 crc kubenswrapper[4799]: I0121 17:53:16.939449 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"21e4d907-4ebb-4839-ab96-454bd4d08954","Type":"ContainerStarted","Data":"9e966b3199c5ea754d6200465d2a4f8631e91d1f0a0042d8d25ad78c7bda2df1"} Jan 21 17:53:16 crc kubenswrapper[4799]: I0121 17:53:16.953187 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5dcb766fd4-g29lg" Jan 21 17:53:16 crc kubenswrapper[4799]: I0121 17:53:16.966394 4799 generic.go:334] "Generic (PLEG): container finished" podID="744eebc5-bc8e-4aaa-9165-35498dadc94a" containerID="cb91333aefe48ff642fb30e865171b3cb1000c1a25db0b45c4d5ac75c209c400" exitCode=0 Jan 21 17:53:16 crc kubenswrapper[4799]: I0121 17:53:16.966436 4799 generic.go:334] "Generic (PLEG): container finished" podID="744eebc5-bc8e-4aaa-9165-35498dadc94a" containerID="fa94f6751f355a4267199fb8670a480539dfe896d65aa307d00f129d20f75045" exitCode=0 Jan 21 17:53:16 crc kubenswrapper[4799]: I0121 17:53:16.966516 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"744eebc5-bc8e-4aaa-9165-35498dadc94a","Type":"ContainerDied","Data":"cb91333aefe48ff642fb30e865171b3cb1000c1a25db0b45c4d5ac75c209c400"} Jan 21 17:53:16 crc kubenswrapper[4799]: I0121 17:53:16.966552 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"744eebc5-bc8e-4aaa-9165-35498dadc94a","Type":"ContainerDied","Data":"fa94f6751f355a4267199fb8670a480539dfe896d65aa307d00f129d20f75045"} Jan 21 17:53:16 crc kubenswrapper[4799]: I0121 17:53:16.975531 4799 generic.go:334] "Generic (PLEG): container finished" podID="aa02fc34-2263-47cb-90cf-7baedb10be5e" containerID="61f209ff39eb7284a64f9fd5b461c8636003e84acec3f15340dbde5ba06129b5" exitCode=0 Jan 21 17:53:16 crc kubenswrapper[4799]: I0121 17:53:16.976582 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5dcb766fd4-g29lg" Jan 21 17:53:16 crc kubenswrapper[4799]: I0121 17:53:16.976757 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dcb766fd4-g29lg" event={"ID":"aa02fc34-2263-47cb-90cf-7baedb10be5e","Type":"ContainerDied","Data":"61f209ff39eb7284a64f9fd5b461c8636003e84acec3f15340dbde5ba06129b5"} Jan 21 17:53:16 crc kubenswrapper[4799]: I0121 17:53:16.976786 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dcb766fd4-g29lg" event={"ID":"aa02fc34-2263-47cb-90cf-7baedb10be5e","Type":"ContainerDied","Data":"f73abbce1d2408b8c314c73e0c563a67e9d0f9f93afbdc86bf6949ccc9c61af9"} Jan 21 17:53:16 crc kubenswrapper[4799]: I0121 17:53:16.976803 4799 scope.go:117] "RemoveContainer" containerID="61f209ff39eb7284a64f9fd5b461c8636003e84acec3f15340dbde5ba06129b5" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.068861 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-combined-ca-bundle\") pod \"aa02fc34-2263-47cb-90cf-7baedb10be5e\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.070621 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa02fc34-2263-47cb-90cf-7baedb10be5e-logs\") pod \"aa02fc34-2263-47cb-90cf-7baedb10be5e\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.070779 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-config-data\") pod \"aa02fc34-2263-47cb-90cf-7baedb10be5e\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.070860 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-config-data-custom\") pod \"aa02fc34-2263-47cb-90cf-7baedb10be5e\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.071095 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkmx7\" (UniqueName: \"kubernetes.io/projected/aa02fc34-2263-47cb-90cf-7baedb10be5e-kube-api-access-lkmx7\") pod \"aa02fc34-2263-47cb-90cf-7baedb10be5e\" (UID: \"aa02fc34-2263-47cb-90cf-7baedb10be5e\") " Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.090681 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa02fc34-2263-47cb-90cf-7baedb10be5e-kube-api-access-lkmx7" (OuterVolumeSpecName: "kube-api-access-lkmx7") pod "aa02fc34-2263-47cb-90cf-7baedb10be5e" (UID: "aa02fc34-2263-47cb-90cf-7baedb10be5e"). InnerVolumeSpecName "kube-api-access-lkmx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.090846 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa02fc34-2263-47cb-90cf-7baedb10be5e-logs" (OuterVolumeSpecName: "logs") pod "aa02fc34-2263-47cb-90cf-7baedb10be5e" (UID: "aa02fc34-2263-47cb-90cf-7baedb10be5e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.123516 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "aa02fc34-2263-47cb-90cf-7baedb10be5e" (UID: "aa02fc34-2263-47cb-90cf-7baedb10be5e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.156696 4799 scope.go:117] "RemoveContainer" containerID="04e72e40123c0bbcb35c145b03d0dd2cf941c3ae38d27d6d61ca9e3df78f2e21" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.177491 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkmx7\" (UniqueName: \"kubernetes.io/projected/aa02fc34-2263-47cb-90cf-7baedb10be5e-kube-api-access-lkmx7\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.177518 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa02fc34-2263-47cb-90cf-7baedb10be5e-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.177529 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.212272 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa02fc34-2263-47cb-90cf-7baedb10be5e" (UID: "aa02fc34-2263-47cb-90cf-7baedb10be5e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.247239 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-config-data" (OuterVolumeSpecName: "config-data") pod "aa02fc34-2263-47cb-90cf-7baedb10be5e" (UID: "aa02fc34-2263-47cb-90cf-7baedb10be5e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.279444 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.279701 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa02fc34-2263-47cb-90cf-7baedb10be5e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:17 crc kubenswrapper[4799]: E0121 17:53:17.283912 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod744eebc5_bc8e_4aaa_9165_35498dadc94a.slice/crio-fa94f6751f355a4267199fb8670a480539dfe896d65aa307d00f129d20f75045.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod744eebc5_bc8e_4aaa_9165_35498dadc94a.slice/crio-conmon-fa94f6751f355a4267199fb8670a480539dfe896d65aa307d00f129d20f75045.scope\": RecentStats: unable to find data in memory cache]" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.377653 4799 scope.go:117] "RemoveContainer" containerID="61f209ff39eb7284a64f9fd5b461c8636003e84acec3f15340dbde5ba06129b5" Jan 21 17:53:17 crc kubenswrapper[4799]: E0121 17:53:17.379014 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61f209ff39eb7284a64f9fd5b461c8636003e84acec3f15340dbde5ba06129b5\": container with ID starting with 61f209ff39eb7284a64f9fd5b461c8636003e84acec3f15340dbde5ba06129b5 not found: ID does not exist" containerID="61f209ff39eb7284a64f9fd5b461c8636003e84acec3f15340dbde5ba06129b5" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.379060 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61f209ff39eb7284a64f9fd5b461c8636003e84acec3f15340dbde5ba06129b5"} err="failed to get container status \"61f209ff39eb7284a64f9fd5b461c8636003e84acec3f15340dbde5ba06129b5\": rpc error: code = NotFound desc = could not find container \"61f209ff39eb7284a64f9fd5b461c8636003e84acec3f15340dbde5ba06129b5\": container with ID starting with 61f209ff39eb7284a64f9fd5b461c8636003e84acec3f15340dbde5ba06129b5 not found: ID does not exist" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.379088 4799 scope.go:117] "RemoveContainer" containerID="04e72e40123c0bbcb35c145b03d0dd2cf941c3ae38d27d6d61ca9e3df78f2e21" Jan 21 17:53:17 crc kubenswrapper[4799]: E0121 17:53:17.389401 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04e72e40123c0bbcb35c145b03d0dd2cf941c3ae38d27d6d61ca9e3df78f2e21\": container with ID starting with 04e72e40123c0bbcb35c145b03d0dd2cf941c3ae38d27d6d61ca9e3df78f2e21 not found: ID does not exist" containerID="04e72e40123c0bbcb35c145b03d0dd2cf941c3ae38d27d6d61ca9e3df78f2e21" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.389466 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04e72e40123c0bbcb35c145b03d0dd2cf941c3ae38d27d6d61ca9e3df78f2e21"} err="failed to get container status \"04e72e40123c0bbcb35c145b03d0dd2cf941c3ae38d27d6d61ca9e3df78f2e21\": rpc error: code = NotFound desc = could not find container 
\"04e72e40123c0bbcb35c145b03d0dd2cf941c3ae38d27d6d61ca9e3df78f2e21\": container with ID starting with 04e72e40123c0bbcb35c145b03d0dd2cf941c3ae38d27d6d61ca9e3df78f2e21 not found: ID does not exist" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.460280 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5dcb766fd4-g29lg"] Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.481881 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5dcb766fd4-g29lg"] Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.801441 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.897426 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-config-data\") pod \"744eebc5-bc8e-4aaa-9165-35498dadc94a\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.897516 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/744eebc5-bc8e-4aaa-9165-35498dadc94a-etc-machine-id\") pod \"744eebc5-bc8e-4aaa-9165-35498dadc94a\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.897639 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zq4l2\" (UniqueName: \"kubernetes.io/projected/744eebc5-bc8e-4aaa-9165-35498dadc94a-kube-api-access-zq4l2\") pod \"744eebc5-bc8e-4aaa-9165-35498dadc94a\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.897693 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-combined-ca-bundle\") pod \"744eebc5-bc8e-4aaa-9165-35498dadc94a\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.897790 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-config-data-custom\") pod \"744eebc5-bc8e-4aaa-9165-35498dadc94a\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.897846 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-scripts\") pod \"744eebc5-bc8e-4aaa-9165-35498dadc94a\" (UID: \"744eebc5-bc8e-4aaa-9165-35498dadc94a\") " Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.898481 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/744eebc5-bc8e-4aaa-9165-35498dadc94a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "744eebc5-bc8e-4aaa-9165-35498dadc94a" (UID: "744eebc5-bc8e-4aaa-9165-35498dadc94a"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.904876 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "744eebc5-bc8e-4aaa-9165-35498dadc94a" (UID: "744eebc5-bc8e-4aaa-9165-35498dadc94a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.908278 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-scripts" (OuterVolumeSpecName: "scripts") pod "744eebc5-bc8e-4aaa-9165-35498dadc94a" (UID: "744eebc5-bc8e-4aaa-9165-35498dadc94a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:17 crc kubenswrapper[4799]: I0121 17:53:17.911686 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/744eebc5-bc8e-4aaa-9165-35498dadc94a-kube-api-access-zq4l2" (OuterVolumeSpecName: "kube-api-access-zq4l2") pod "744eebc5-bc8e-4aaa-9165-35498dadc94a" (UID: "744eebc5-bc8e-4aaa-9165-35498dadc94a"). InnerVolumeSpecName "kube-api-access-zq4l2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.003486 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.003553 4799 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/744eebc5-bc8e-4aaa-9165-35498dadc94a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.003570 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zq4l2\" (UniqueName: \"kubernetes.io/projected/744eebc5-bc8e-4aaa-9165-35498dadc94a-kube-api-access-zq4l2\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.003583 4799 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.010775 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5ff098a1-6561-4017-98c9-3014f678faad","Type":"ContainerStarted","Data":"919a34b4723f6e4a231589d646cca27f8272d69e83efd3c9631383fc2e9194e2"} Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.021153 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f460180e-2550-4286-ae68-85d752d3a3a3","Type":"ContainerStarted","Data":"ee930308a237b29e6aa57ad4b16a4156f73c3528c289a0d170147bb714ff9ea3"} Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.023961 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e8bf54a9-bf9c-47d1-b391-b73c0055c935","Type":"ContainerStarted","Data":"585498d70e6971175ff238e1f3a33bcc94823caa43c34924f6f7eef55627c73f"} Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.047705 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"744eebc5-bc8e-4aaa-9165-35498dadc94a","Type":"ContainerDied","Data":"0d4b815aa0764eaa650bda10bd507ece045682da60b40e13119c26f357fbb95d"} Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.047760 4799 scope.go:117] "RemoveContainer" containerID="cb91333aefe48ff642fb30e865171b3cb1000c1a25db0b45c4d5ac75c209c400" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.047913 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.104066 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "744eebc5-bc8e-4aaa-9165-35498dadc94a" (UID: "744eebc5-bc8e-4aaa-9165-35498dadc94a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.105645 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.128337 4799 scope.go:117] "RemoveContainer" containerID="fa94f6751f355a4267199fb8670a480539dfe896d65aa307d00f129d20f75045" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.163544 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-config-data" (OuterVolumeSpecName: "config-data") pod "744eebc5-bc8e-4aaa-9165-35498dadc94a" (UID: "744eebc5-bc8e-4aaa-9165-35498dadc94a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.207886 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/744eebc5-bc8e-4aaa-9165-35498dadc94a-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.225295 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa02fc34-2263-47cb-90cf-7baedb10be5e" path="/var/lib/kubelet/pods/aa02fc34-2263-47cb-90cf-7baedb10be5e/volumes" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.409319 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.457610 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.472723 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 21 17:53:18 crc kubenswrapper[4799]: E0121 17:53:18.473738 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa02fc34-2263-47cb-90cf-7baedb10be5e" containerName="barbican-api-log" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.473764 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa02fc34-2263-47cb-90cf-7baedb10be5e" containerName="barbican-api-log" Jan 21 17:53:18 crc kubenswrapper[4799]: E0121 17:53:18.473804 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa02fc34-2263-47cb-90cf-7baedb10be5e" containerName="barbican-api" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.473812 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa02fc34-2263-47cb-90cf-7baedb10be5e" containerName="barbican-api" Jan 21 17:53:18 crc kubenswrapper[4799]: E0121 17:53:18.473852 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="744eebc5-bc8e-4aaa-9165-35498dadc94a" containerName="cinder-scheduler" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.473861 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="744eebc5-bc8e-4aaa-9165-35498dadc94a" containerName="cinder-scheduler" Jan 21 17:53:18 crc kubenswrapper[4799]: E0121 17:53:18.473879 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="744eebc5-bc8e-4aaa-9165-35498dadc94a" containerName="probe" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.473885 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="744eebc5-bc8e-4aaa-9165-35498dadc94a" containerName="probe" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.474421 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa02fc34-2263-47cb-90cf-7baedb10be5e" containerName="barbican-api" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.474472 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="744eebc5-bc8e-4aaa-9165-35498dadc94a" containerName="cinder-scheduler" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.474497 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="744eebc5-bc8e-4aaa-9165-35498dadc94a" containerName="probe" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.474529 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa02fc34-2263-47cb-90cf-7baedb10be5e" containerName="barbican-api-log" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.492458 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.496003 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.538470 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.546161 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb283f72-f347-49c6-9813-6fece9fc70da-scripts\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.546317 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb283f72-f347-49c6-9813-6fece9fc70da-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.546430 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x4zc\" (UniqueName: \"kubernetes.io/projected/eb283f72-f347-49c6-9813-6fece9fc70da-kube-api-access-2x4zc\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.546543 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb283f72-f347-49c6-9813-6fece9fc70da-config-data\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.546738 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eb283f72-f347-49c6-9813-6fece9fc70da-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.547069 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/eb283f72-f347-49c6-9813-6fece9fc70da-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.648710 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb283f72-f347-49c6-9813-6fece9fc70da-config-data\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.648785 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eb283f72-f347-49c6-9813-6fece9fc70da-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.648927 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/eb283f72-f347-49c6-9813-6fece9fc70da-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.648962 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb283f72-f347-49c6-9813-6fece9fc70da-scripts\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.648983 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb283f72-f347-49c6-9813-6fece9fc70da-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.649015 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x4zc\" (UniqueName: \"kubernetes.io/projected/eb283f72-f347-49c6-9813-6fece9fc70da-kube-api-access-2x4zc\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.649492 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/eb283f72-f347-49c6-9813-6fece9fc70da-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.661952 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb283f72-f347-49c6-9813-6fece9fc70da-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.662407 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb283f72-f347-49c6-9813-6fece9fc70da-scripts\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.664651 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-86999674c5-gpgq6" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.668382 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eb283f72-f347-49c6-9813-6fece9fc70da-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.670719 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x4zc\" (UniqueName: \"kubernetes.io/projected/eb283f72-f347-49c6-9813-6fece9fc70da-kube-api-access-2x4zc\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.685755 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/eb283f72-f347-49c6-9813-6fece9fc70da-config-data\") pod \"cinder-scheduler-0\" (UID: \"eb283f72-f347-49c6-9813-6fece9fc70da\") " pod="openstack/cinder-scheduler-0" Jan 21 17:53:18 crc kubenswrapper[4799]: I0121 17:53:18.859650 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.069803 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7c9474f76d-ptsv9" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.113881 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5ff098a1-6561-4017-98c9-3014f678faad","Type":"ContainerStarted","Data":"9f0fc1351f8528068db150962a8958eda5dbc0473e91ef43e13f35d02bb6ca78"} Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.127536 4799 generic.go:334] "Generic (PLEG): container finished" podID="57391f37-88fc-4dca-9afd-159d78c47ca1" containerID="07d914c9327fc6d601567f76ce1987ea59b0494b1c050f06533a99a64c744755" exitCode=0 Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.127696 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c9474f76d-ptsv9" event={"ID":"57391f37-88fc-4dca-9afd-159d78c47ca1","Type":"ContainerDied","Data":"07d914c9327fc6d601567f76ce1987ea59b0494b1c050f06533a99a64c744755"} Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.127752 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c9474f76d-ptsv9" event={"ID":"57391f37-88fc-4dca-9afd-159d78c47ca1","Type":"ContainerDied","Data":"37fc0a1cb9cce7d310bfe07dd8eb0fd5efca1c4d94728b7e3867134d52c69a88"} Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.127775 4799 scope.go:117] "RemoveContainer" containerID="e200b63c65b19acd0df9eee80040ffdddf00f005ff88c6d89d24eff56f448dbc" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.128053 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7c9474f76d-ptsv9" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.149980 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.149960029 podStartE2EDuration="6.149960029s" podCreationTimestamp="2026-01-21 17:53:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:53:19.136900143 +0000 UTC m=+1225.763190176" watchObservedRunningTime="2026-01-21 17:53:19.149960029 +0000 UTC m=+1225.776250052" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.156116 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"21e4d907-4ebb-4839-ab96-454bd4d08954","Type":"ContainerStarted","Data":"8926411eb04e63565c5b583071005e198c2fbad3bfe9f0d039f0483c2712051f"} Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.159781 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e8bf54a9-bf9c-47d1-b391-b73c0055c935","Type":"ContainerStarted","Data":"fc1d3039783cd51e7a24310429365857a5846a4675512bb6d1a662af8874ad51"} Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.160503 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.185467 4799 scope.go:117] "RemoveContainer" containerID="07d914c9327fc6d601567f76ce1987ea59b0494b1c050f06533a99a64c744755" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.199018 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.19898596 podStartE2EDuration="6.19898596s" podCreationTimestamp="2026-01-21 17:53:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:53:19.189073453 +0000 UTC m=+1225.815363476" watchObservedRunningTime="2026-01-21 17:53:19.19898596 +0000 UTC m=+1225.825275983" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.222180 4799 scope.go:117] "RemoveContainer" containerID="e200b63c65b19acd0df9eee80040ffdddf00f005ff88c6d89d24eff56f448dbc" Jan 21 17:53:19 crc kubenswrapper[4799]: E0121 17:53:19.227283 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e200b63c65b19acd0df9eee80040ffdddf00f005ff88c6d89d24eff56f448dbc\": container with ID starting with e200b63c65b19acd0df9eee80040ffdddf00f005ff88c6d89d24eff56f448dbc not found: ID does not exist" containerID="e200b63c65b19acd0df9eee80040ffdddf00f005ff88c6d89d24eff56f448dbc" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.227346 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e200b63c65b19acd0df9eee80040ffdddf00f005ff88c6d89d24eff56f448dbc"} err="failed to get container status \"e200b63c65b19acd0df9eee80040ffdddf00f005ff88c6d89d24eff56f448dbc\": rpc error: code = NotFound desc = could not find container \"e200b63c65b19acd0df9eee80040ffdddf00f005ff88c6d89d24eff56f448dbc\": container with ID starting with e200b63c65b19acd0df9eee80040ffdddf00f005ff88c6d89d24eff56f448dbc not found: ID does not exist" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.227396 4799 scope.go:117] "RemoveContainer" 
containerID="07d914c9327fc6d601567f76ce1987ea59b0494b1c050f06533a99a64c744755" Jan 21 17:53:19 crc kubenswrapper[4799]: E0121 17:53:19.231532 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07d914c9327fc6d601567f76ce1987ea59b0494b1c050f06533a99a64c744755\": container with ID starting with 07d914c9327fc6d601567f76ce1987ea59b0494b1c050f06533a99a64c744755 not found: ID does not exist" containerID="07d914c9327fc6d601567f76ce1987ea59b0494b1c050f06533a99a64c744755" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.231611 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07d914c9327fc6d601567f76ce1987ea59b0494b1c050f06533a99a64c744755"} err="failed to get container status \"07d914c9327fc6d601567f76ce1987ea59b0494b1c050f06533a99a64c744755\": rpc error: code = NotFound desc = could not find container \"07d914c9327fc6d601567f76ce1987ea59b0494b1c050f06533a99a64c744755\": container with ID starting with 07d914c9327fc6d601567f76ce1987ea59b0494b1c050f06533a99a64c744755 not found: ID does not exist" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.262886 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-httpd-config\") pod \"57391f37-88fc-4dca-9afd-159d78c47ca1\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.262935 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-combined-ca-bundle\") pod \"57391f37-88fc-4dca-9afd-159d78c47ca1\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.262998 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-ovndb-tls-certs\") pod \"57391f37-88fc-4dca-9afd-159d78c47ca1\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.263068 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-config\") pod \"57391f37-88fc-4dca-9afd-159d78c47ca1\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.263206 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xj7tf\" (UniqueName: \"kubernetes.io/projected/57391f37-88fc-4dca-9afd-159d78c47ca1-kube-api-access-xj7tf\") pod \"57391f37-88fc-4dca-9afd-159d78c47ca1\" (UID: \"57391f37-88fc-4dca-9afd-159d78c47ca1\") " Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.288883 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "57391f37-88fc-4dca-9afd-159d78c47ca1" (UID: "57391f37-88fc-4dca-9afd-159d78c47ca1"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.290238 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57391f37-88fc-4dca-9afd-159d78c47ca1-kube-api-access-xj7tf" (OuterVolumeSpecName: "kube-api-access-xj7tf") pod "57391f37-88fc-4dca-9afd-159d78c47ca1" (UID: "57391f37-88fc-4dca-9afd-159d78c47ca1"). InnerVolumeSpecName "kube-api-access-xj7tf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.366601 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.366636 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xj7tf\" (UniqueName: \"kubernetes.io/projected/57391f37-88fc-4dca-9afd-159d78c47ca1-kube-api-access-xj7tf\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.431313 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "57391f37-88fc-4dca-9afd-159d78c47ca1" (UID: "57391f37-88fc-4dca-9afd-159d78c47ca1"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.434340 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57391f37-88fc-4dca-9afd-159d78c47ca1" (UID: "57391f37-88fc-4dca-9afd-159d78c47ca1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.461770 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.469991 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.470041 4799 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.491066 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-config" (OuterVolumeSpecName: "config") pod "57391f37-88fc-4dca-9afd-159d78c47ca1" (UID: "57391f37-88fc-4dca-9afd-159d78c47ca1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.585304 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/57391f37-88fc-4dca-9afd-159d78c47ca1-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.783225 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7c9474f76d-ptsv9"] Jan 21 17:53:19 crc kubenswrapper[4799]: I0121 17:53:19.791120 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7c9474f76d-ptsv9"] Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.113327 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7d9c7df8bb-b2r9b" podUID="fde84d23-f64f-4299-af94-1d29894acdc0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.158:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.158:8443: connect: connection refused" Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.113658 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.177425 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"21e4d907-4ebb-4839-ab96-454bd4d08954","Type":"ContainerStarted","Data":"a6183f03da598d687e7dcc3db8033af4efa220d4e56c96b8925d84fc692ea091"} Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.180373 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f460180e-2550-4286-ae68-85d752d3a3a3","Type":"ContainerStarted","Data":"c7091f62fc43bcecafa9356507579c6eb65acb4d37d20ba7f981b82ad72d9e7d"} Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.180666 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.183438 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"eb283f72-f347-49c6-9813-6fece9fc70da","Type":"ContainerStarted","Data":"2c009679069a51d141caa06c5bc3e2d8f43cb8e57a6fae42e2258e5a753710e6"} Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.212689 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.212668902 podStartE2EDuration="7.212668902s" podCreationTimestamp="2026-01-21 17:53:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:53:20.196713936 +0000 UTC m=+1226.823003949" watchObservedRunningTime="2026-01-21 17:53:20.212668902 +0000 UTC m=+1226.838958925" Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.227686 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.543399236 podStartE2EDuration="9.227664502s" podCreationTimestamp="2026-01-21 17:53:11 +0000 UTC" firstStartedPulling="2026-01-21 17:53:13.160972519 +0000 UTC m=+1219.787262542" lastFinishedPulling="2026-01-21 17:53:18.845237785 +0000 UTC m=+1225.471527808" observedRunningTime="2026-01-21 17:53:20.221557901 +0000 UTC m=+1226.847847944" watchObservedRunningTime="2026-01-21 17:53:20.227664502 +0000 UTC m=+1226.853954525" Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.239913 4799 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="57391f37-88fc-4dca-9afd-159d78c47ca1" path="/var/lib/kubelet/pods/57391f37-88fc-4dca-9afd-159d78c47ca1/volumes" Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.242725 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="744eebc5-bc8e-4aaa-9165-35498dadc94a" path="/var/lib/kubelet/pods/744eebc5-bc8e-4aaa-9165-35498dadc94a/volumes" Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.972829 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 21 17:53:20 crc kubenswrapper[4799]: E0121 17:53:20.973555 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57391f37-88fc-4dca-9afd-159d78c47ca1" containerName="neutron-httpd" Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.973573 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="57391f37-88fc-4dca-9afd-159d78c47ca1" containerName="neutron-httpd" Jan 21 17:53:20 crc kubenswrapper[4799]: E0121 17:53:20.973595 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57391f37-88fc-4dca-9afd-159d78c47ca1" containerName="neutron-api" Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.973600 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="57391f37-88fc-4dca-9afd-159d78c47ca1" containerName="neutron-api" Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.973789 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="57391f37-88fc-4dca-9afd-159d78c47ca1" containerName="neutron-api" Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.973813 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="57391f37-88fc-4dca-9afd-159d78c47ca1" containerName="neutron-httpd" Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.974485 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.977509 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-r5vrx" Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.977824 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.983428 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Jan 21 17:53:20 crc kubenswrapper[4799]: I0121 17:53:20.987648 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.119505 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff8dc291-a487-43d0-a494-9496737703ef-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ff8dc291-a487-43d0-a494-9496737703ef\") " pod="openstack/openstackclient" Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.121320 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ff8dc291-a487-43d0-a494-9496737703ef-openstack-config\") pod \"openstackclient\" (UID: \"ff8dc291-a487-43d0-a494-9496737703ef\") " pod="openstack/openstackclient" Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.121399 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ff8dc291-a487-43d0-a494-9496737703ef-openstack-config-secret\") pod \"openstackclient\" (UID: \"ff8dc291-a487-43d0-a494-9496737703ef\") " pod="openstack/openstackclient" Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.121452 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74xvk\" (UniqueName: \"kubernetes.io/projected/ff8dc291-a487-43d0-a494-9496737703ef-kube-api-access-74xvk\") pod \"openstackclient\" (UID: \"ff8dc291-a487-43d0-a494-9496737703ef\") " pod="openstack/openstackclient" Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.224214 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ff8dc291-a487-43d0-a494-9496737703ef-openstack-config\") pod \"openstackclient\" (UID: \"ff8dc291-a487-43d0-a494-9496737703ef\") " pod="openstack/openstackclient" Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.224288 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ff8dc291-a487-43d0-a494-9496737703ef-openstack-config-secret\") pod \"openstackclient\" (UID: \"ff8dc291-a487-43d0-a494-9496737703ef\") " pod="openstack/openstackclient" Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.224327 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74xvk\" (UniqueName: \"kubernetes.io/projected/ff8dc291-a487-43d0-a494-9496737703ef-kube-api-access-74xvk\") pod \"openstackclient\" (UID: \"ff8dc291-a487-43d0-a494-9496737703ef\") " pod="openstack/openstackclient" Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.224427 4799 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff8dc291-a487-43d0-a494-9496737703ef-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ff8dc291-a487-43d0-a494-9496737703ef\") " pod="openstack/openstackclient" Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.238591 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff8dc291-a487-43d0-a494-9496737703ef-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ff8dc291-a487-43d0-a494-9496737703ef\") " pod="openstack/openstackclient" Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.239505 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ff8dc291-a487-43d0-a494-9496737703ef-openstack-config\") pod \"openstackclient\" (UID: \"ff8dc291-a487-43d0-a494-9496737703ef\") " pod="openstack/openstackclient" Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.243661 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ff8dc291-a487-43d0-a494-9496737703ef-openstack-config-secret\") pod \"openstackclient\" (UID: \"ff8dc291-a487-43d0-a494-9496737703ef\") " pod="openstack/openstackclient" Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.263958 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74xvk\" (UniqueName: \"kubernetes.io/projected/ff8dc291-a487-43d0-a494-9496737703ef-kube-api-access-74xvk\") pod \"openstackclient\" (UID: \"ff8dc291-a487-43d0-a494-9496737703ef\") " pod="openstack/openstackclient" Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.636621 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.653468 4799 generic.go:334] "Generic (PLEG): container finished" podID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerID="d3aaffd6bf3f96c9b7d3914da4516463321849f010304db2511463d6ce079929" exitCode=1 Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.653554 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"e3555046-24d9-4700-bdb8-0a09c35f651a","Type":"ContainerDied","Data":"d3aaffd6bf3f96c9b7d3914da4516463321849f010304db2511463d6ce079929"} Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.653614 4799 scope.go:117] "RemoveContainer" containerID="feb17023cb77e67cafee7aaedbb3379b33fc70a5c6313f02972807ffc7e15f7f" Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.657353 4799 scope.go:117] "RemoveContainer" containerID="d3aaffd6bf3f96c9b7d3914da4516463321849f010304db2511463d6ce079929" Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.657724 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"eb283f72-f347-49c6-9813-6fece9fc70da","Type":"ContainerStarted","Data":"450b1ca573c355e526111625aaea2090022ff5a538628b8af67d197e135e6a41"} Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.657848 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"eb283f72-f347-49c6-9813-6fece9fc70da","Type":"ContainerStarted","Data":"ffbd4810c088c0f88e381abb19c2a45f9e39ef4ac96e83fb0a52e36708382ac7"} Jan 21 17:53:21 crc kubenswrapper[4799]: E0121 17:53:21.658039 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(e3555046-24d9-4700-bdb8-0a09c35f651a)\"" pod="openstack/watcher-decision-engine-0" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" Jan 21 17:53:21 crc kubenswrapper[4799]: I0121 17:53:21.712859 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.712831911 podStartE2EDuration="3.712831911s" podCreationTimestamp="2026-01-21 17:53:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:53:21.706026211 +0000 UTC m=+1228.332316234" watchObservedRunningTime="2026-01-21 17:53:21.712831911 +0000 UTC m=+1228.339121934" Jan 21 17:53:22 crc kubenswrapper[4799]: I0121 17:53:22.150445 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 21 17:53:22 crc kubenswrapper[4799]: I0121 17:53:22.667511 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ff8dc291-a487-43d0-a494-9496737703ef","Type":"ContainerStarted","Data":"ab76e031ed3e3605f3df09b41e5083769ab4fb0d62a7bd9e49c0116a3313f46d"} Jan 21 17:53:23 crc kubenswrapper[4799]: I0121 17:53:23.819723 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Jan 21 17:53:23 crc kubenswrapper[4799]: I0121 17:53:23.820076 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Jan 21 17:53:23 crc kubenswrapper[4799]: I0121 17:53:23.821219 4799 scope.go:117] "RemoveContainer" 
containerID="d3aaffd6bf3f96c9b7d3914da4516463321849f010304db2511463d6ce079929" Jan 21 17:53:23 crc kubenswrapper[4799]: E0121 17:53:23.821519 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(e3555046-24d9-4700-bdb8-0a09c35f651a)\"" pod="openstack/watcher-decision-engine-0" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" Jan 21 17:53:23 crc kubenswrapper[4799]: I0121 17:53:23.860575 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 21 17:53:24 crc kubenswrapper[4799]: I0121 17:53:24.517354 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 21 17:53:24 crc kubenswrapper[4799]: I0121 17:53:24.517412 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 21 17:53:24 crc kubenswrapper[4799]: I0121 17:53:24.524571 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 21 17:53:24 crc kubenswrapper[4799]: I0121 17:53:24.524647 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 21 17:53:24 crc kubenswrapper[4799]: I0121 17:53:24.561502 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 21 17:53:24 crc kubenswrapper[4799]: I0121 17:53:24.565368 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 21 17:53:24 crc kubenswrapper[4799]: I0121 17:53:24.569671 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 21 17:53:24 crc kubenswrapper[4799]: I0121 17:53:24.577284 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 21 17:53:24 crc kubenswrapper[4799]: I0121 17:53:24.708799 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 21 17:53:24 crc kubenswrapper[4799]: I0121 17:53:24.709090 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 21 17:53:24 crc kubenswrapper[4799]: I0121 17:53:24.709266 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 21 17:53:24 crc kubenswrapper[4799]: I0121 17:53:24.709675 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 21 17:53:25 crc kubenswrapper[4799]: I0121 17:53:25.970917 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:53:25 crc kubenswrapper[4799]: I0121 17:53:25.971352 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.761114 4799 generic.go:334] "Generic (PLEG): container finished" podID="fde84d23-f64f-4299-af94-1d29894acdc0" containerID="11f21f7e5deaa70ee0d77740377532b2349b1405cb6eb0d5c203aacc4806a2a7" exitCode=137 Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.761544 4799 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.761554 4799 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.762219 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d9c7df8bb-b2r9b" event={"ID":"fde84d23-f64f-4299-af94-1d29894acdc0","Type":"ContainerDied","Data":"11f21f7e5deaa70ee0d77740377532b2349b1405cb6eb0d5c203aacc4806a2a7"} Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.762287 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d9c7df8bb-b2r9b" event={"ID":"fde84d23-f64f-4299-af94-1d29894acdc0","Type":"ContainerDied","Data":"779e83bd8e5768e28f3d74bd9085d854a207a29d6c1165c9c7a49b899c0dd19a"} Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.762306 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="779e83bd8e5768e28f3d74bd9085d854a207a29d6c1165c9c7a49b899c0dd19a" Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.817643 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.873220 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-combined-ca-bundle\") pod \"fde84d23-f64f-4299-af94-1d29894acdc0\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.873289 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fde84d23-f64f-4299-af94-1d29894acdc0-scripts\") pod \"fde84d23-f64f-4299-af94-1d29894acdc0\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.873484 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-horizon-tls-certs\") pod \"fde84d23-f64f-4299-af94-1d29894acdc0\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.873584 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fde84d23-f64f-4299-af94-1d29894acdc0-config-data\") pod \"fde84d23-f64f-4299-af94-1d29894acdc0\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.873622 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-horizon-secret-key\") pod \"fde84d23-f64f-4299-af94-1d29894acdc0\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.873651 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-9st5k\" (UniqueName: \"kubernetes.io/projected/fde84d23-f64f-4299-af94-1d29894acdc0-kube-api-access-9st5k\") pod \"fde84d23-f64f-4299-af94-1d29894acdc0\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.873706 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fde84d23-f64f-4299-af94-1d29894acdc0-logs\") pod \"fde84d23-f64f-4299-af94-1d29894acdc0\" (UID: \"fde84d23-f64f-4299-af94-1d29894acdc0\") " Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.875567 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fde84d23-f64f-4299-af94-1d29894acdc0-logs" (OuterVolumeSpecName: "logs") pod "fde84d23-f64f-4299-af94-1d29894acdc0" (UID: "fde84d23-f64f-4299-af94-1d29894acdc0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.884345 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fde84d23-f64f-4299-af94-1d29894acdc0-kube-api-access-9st5k" (OuterVolumeSpecName: "kube-api-access-9st5k") pod "fde84d23-f64f-4299-af94-1d29894acdc0" (UID: "fde84d23-f64f-4299-af94-1d29894acdc0"). InnerVolumeSpecName "kube-api-access-9st5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.891427 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "fde84d23-f64f-4299-af94-1d29894acdc0" (UID: "fde84d23-f64f-4299-af94-1d29894acdc0"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.960447 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fde84d23-f64f-4299-af94-1d29894acdc0-scripts" (OuterVolumeSpecName: "scripts") pod "fde84d23-f64f-4299-af94-1d29894acdc0" (UID: "fde84d23-f64f-4299-af94-1d29894acdc0"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.980495 4799 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.980529 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9st5k\" (UniqueName: \"kubernetes.io/projected/fde84d23-f64f-4299-af94-1d29894acdc0-kube-api-access-9st5k\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.980542 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fde84d23-f64f-4299-af94-1d29894acdc0-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.980554 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fde84d23-f64f-4299-af94-1d29894acdc0-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:26 crc kubenswrapper[4799]: I0121 17:53:26.982306 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fde84d23-f64f-4299-af94-1d29894acdc0" (UID: "fde84d23-f64f-4299-af94-1d29894acdc0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:27 crc kubenswrapper[4799]: I0121 17:53:27.020858 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fde84d23-f64f-4299-af94-1d29894acdc0-config-data" (OuterVolumeSpecName: "config-data") pod "fde84d23-f64f-4299-af94-1d29894acdc0" (UID: "fde84d23-f64f-4299-af94-1d29894acdc0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:53:27 crc kubenswrapper[4799]: I0121 17:53:27.028524 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "fde84d23-f64f-4299-af94-1d29894acdc0" (UID: "fde84d23-f64f-4299-af94-1d29894acdc0"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:27 crc kubenswrapper[4799]: I0121 17:53:27.082647 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fde84d23-f64f-4299-af94-1d29894acdc0-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:27 crc kubenswrapper[4799]: I0121 17:53:27.082695 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:27 crc kubenswrapper[4799]: I0121 17:53:27.082713 4799 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fde84d23-f64f-4299-af94-1d29894acdc0-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:27 crc kubenswrapper[4799]: I0121 17:53:27.486720 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Jan 21 17:53:27 crc kubenswrapper[4799]: I0121 17:53:27.774740 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7d9c7df8bb-b2r9b" Jan 21 17:53:27 crc kubenswrapper[4799]: I0121 17:53:27.823864 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7d9c7df8bb-b2r9b"] Jan 21 17:53:27 crc kubenswrapper[4799]: I0121 17:53:27.831954 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7d9c7df8bb-b2r9b"] Jan 21 17:53:28 crc kubenswrapper[4799]: I0121 17:53:28.202650 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 21 17:53:28 crc kubenswrapper[4799]: I0121 17:53:28.204150 4799 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 21 17:53:28 crc kubenswrapper[4799]: I0121 17:53:28.223611 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fde84d23-f64f-4299-af94-1d29894acdc0" path="/var/lib/kubelet/pods/fde84d23-f64f-4299-af94-1d29894acdc0/volumes" Jan 21 17:53:28 crc kubenswrapper[4799]: I0121 17:53:28.224319 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 21 17:53:28 crc kubenswrapper[4799]: I0121 17:53:28.224354 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 21 17:53:28 crc kubenswrapper[4799]: I0121 17:53:28.309378 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 21 17:53:29 crc kubenswrapper[4799]: I0121 17:53:29.082854 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.148967 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-749b6794b5-k8rw7"] Jan 21 17:53:30 crc kubenswrapper[4799]: E0121 17:53:30.149951 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fde84d23-f64f-4299-af94-1d29894acdc0" containerName="horizon-log" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.149974 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fde84d23-f64f-4299-af94-1d29894acdc0" containerName="horizon-log" Jan 21 17:53:30 crc kubenswrapper[4799]: E0121 17:53:30.149992 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fde84d23-f64f-4299-af94-1d29894acdc0" containerName="horizon" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.150004 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fde84d23-f64f-4299-af94-1d29894acdc0" containerName="horizon" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.150365 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="fde84d23-f64f-4299-af94-1d29894acdc0" containerName="horizon-log" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.150400 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="fde84d23-f64f-4299-af94-1d29894acdc0" containerName="horizon" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.151967 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.296978 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.297282 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.301638 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.388273 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-749b6794b5-k8rw7"] Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.395604 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-public-tls-certs\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.395685 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l28ct\" (UniqueName: \"kubernetes.io/projected/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-kube-api-access-l28ct\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.395716 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-internal-tls-certs\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.395768 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-config-data\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.395799 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-run-httpd\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.395819 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-combined-ca-bundle\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.395847 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-etc-swift\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " 
pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.395903 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-log-httpd\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.424713 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.425008 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="ceilometer-central-agent" containerID="cri-o://3cc1bd6b58b490cb895f45e1aedf80494e8157a3d8bce95caef1f4ce0d5d933e" gracePeriod=30 Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.425616 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="proxy-httpd" containerID="cri-o://c7091f62fc43bcecafa9356507579c6eb65acb4d37d20ba7f981b82ad72d9e7d" gracePeriod=30 Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.425703 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="sg-core" containerID="cri-o://ee930308a237b29e6aa57ad4b16a4156f73c3528c289a0d170147bb714ff9ea3" gracePeriod=30 Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.425762 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="ceilometer-notification-agent" containerID="cri-o://36846e56ece6d0c4b28e55e88c26357a286f618d878556c6e3c7d54caaadca87" gracePeriod=30 Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.500544 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-internal-tls-certs\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.500940 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-config-data\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.500989 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-run-httpd\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.501014 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-combined-ca-bundle\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 
crc kubenswrapper[4799]: I0121 17:53:30.501044 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-etc-swift\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.501102 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-log-httpd\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.501253 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-public-tls-certs\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.501309 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l28ct\" (UniqueName: \"kubernetes.io/projected/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-kube-api-access-l28ct\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.507405 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-run-httpd\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.507430 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-log-httpd\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.509638 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-internal-tls-certs\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.511487 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-combined-ca-bundle\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.512477 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-etc-swift\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.513900 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-public-tls-certs\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.516310 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-config-data\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.524778 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l28ct\" (UniqueName: \"kubernetes.io/projected/f7542699-9beb-4966-b1e4-b3c3cb9b42ff-kube-api-access-l28ct\") pod \"swift-proxy-749b6794b5-k8rw7\" (UID: \"f7542699-9beb-4966-b1e4-b3c3cb9b42ff\") " pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.532920 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.181:3000/\": read tcp 10.217.0.2:35624->10.217.0.181:3000: read: connection reset by peer" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.624572 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.812796 4799 generic.go:334] "Generic (PLEG): container finished" podID="f460180e-2550-4286-ae68-85d752d3a3a3" containerID="c7091f62fc43bcecafa9356507579c6eb65acb4d37d20ba7f981b82ad72d9e7d" exitCode=0 Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.812831 4799 generic.go:334] "Generic (PLEG): container finished" podID="f460180e-2550-4286-ae68-85d752d3a3a3" containerID="ee930308a237b29e6aa57ad4b16a4156f73c3528c289a0d170147bb714ff9ea3" exitCode=2 Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.812852 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f460180e-2550-4286-ae68-85d752d3a3a3","Type":"ContainerDied","Data":"c7091f62fc43bcecafa9356507579c6eb65acb4d37d20ba7f981b82ad72d9e7d"} Jan 21 17:53:30 crc kubenswrapper[4799]: I0121 17:53:30.812878 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f460180e-2550-4286-ae68-85d752d3a3a3","Type":"ContainerDied","Data":"ee930308a237b29e6aa57ad4b16a4156f73c3528c289a0d170147bb714ff9ea3"} Jan 21 17:53:31 crc kubenswrapper[4799]: I0121 17:53:31.835727 4799 generic.go:334] "Generic (PLEG): container finished" podID="f460180e-2550-4286-ae68-85d752d3a3a3" containerID="3cc1bd6b58b490cb895f45e1aedf80494e8157a3d8bce95caef1f4ce0d5d933e" exitCode=0 Jan 21 17:53:31 crc kubenswrapper[4799]: I0121 17:53:31.835791 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f460180e-2550-4286-ae68-85d752d3a3a3","Type":"ContainerDied","Data":"3cc1bd6b58b490cb895f45e1aedf80494e8157a3d8bce95caef1f4ce0d5d933e"} Jan 21 17:53:33 crc kubenswrapper[4799]: I0121 17:53:33.870605 4799 generic.go:334] "Generic (PLEG): container finished" podID="f460180e-2550-4286-ae68-85d752d3a3a3" containerID="36846e56ece6d0c4b28e55e88c26357a286f618d878556c6e3c7d54caaadca87" exitCode=0 Jan 21 17:53:33 crc kubenswrapper[4799]: I0121 17:53:33.870748 4799 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f460180e-2550-4286-ae68-85d752d3a3a3","Type":"ContainerDied","Data":"36846e56ece6d0c4b28e55e88c26357a286f618d878556c6e3c7d54caaadca87"} Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.578649 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.770215 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xq66g\" (UniqueName: \"kubernetes.io/projected/f460180e-2550-4286-ae68-85d752d3a3a3-kube-api-access-xq66g\") pod \"f460180e-2550-4286-ae68-85d752d3a3a3\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.770270 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f460180e-2550-4286-ae68-85d752d3a3a3-log-httpd\") pod \"f460180e-2550-4286-ae68-85d752d3a3a3\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.770314 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f460180e-2550-4286-ae68-85d752d3a3a3-run-httpd\") pod \"f460180e-2550-4286-ae68-85d752d3a3a3\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.770404 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-sg-core-conf-yaml\") pod \"f460180e-2550-4286-ae68-85d752d3a3a3\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.770490 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-config-data\") pod \"f460180e-2550-4286-ae68-85d752d3a3a3\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.770518 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-scripts\") pod \"f460180e-2550-4286-ae68-85d752d3a3a3\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.770563 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-combined-ca-bundle\") pod \"f460180e-2550-4286-ae68-85d752d3a3a3\" (UID: \"f460180e-2550-4286-ae68-85d752d3a3a3\") " Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.772130 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f460180e-2550-4286-ae68-85d752d3a3a3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f460180e-2550-4286-ae68-85d752d3a3a3" (UID: "f460180e-2550-4286-ae68-85d752d3a3a3"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.776618 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f460180e-2550-4286-ae68-85d752d3a3a3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f460180e-2550-4286-ae68-85d752d3a3a3" (UID: "f460180e-2550-4286-ae68-85d752d3a3a3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.778349 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f460180e-2550-4286-ae68-85d752d3a3a3-kube-api-access-xq66g" (OuterVolumeSpecName: "kube-api-access-xq66g") pod "f460180e-2550-4286-ae68-85d752d3a3a3" (UID: "f460180e-2550-4286-ae68-85d752d3a3a3"). InnerVolumeSpecName "kube-api-access-xq66g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.778448 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-scripts" (OuterVolumeSpecName: "scripts") pod "f460180e-2550-4286-ae68-85d752d3a3a3" (UID: "f460180e-2550-4286-ae68-85d752d3a3a3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.815649 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f460180e-2550-4286-ae68-85d752d3a3a3" (UID: "f460180e-2550-4286-ae68-85d752d3a3a3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.849405 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-749b6794b5-k8rw7"] Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.875870 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xq66g\" (UniqueName: \"kubernetes.io/projected/f460180e-2550-4286-ae68-85d752d3a3a3-kube-api-access-xq66g\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.875903 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f460180e-2550-4286-ae68-85d752d3a3a3-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.875916 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f460180e-2550-4286-ae68-85d752d3a3a3-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.875945 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.875953 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.896242 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") 
pod "f460180e-2550-4286-ae68-85d752d3a3a3" (UID: "f460180e-2550-4286-ae68-85d752d3a3a3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.903649 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ff8dc291-a487-43d0-a494-9496737703ef","Type":"ContainerStarted","Data":"4a488c089d4cbd113f646e2312a5856de727be79a4724dd530f4c4fcb1754b67"} Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.905105 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-749b6794b5-k8rw7" event={"ID":"f7542699-9beb-4966-b1e4-b3c3cb9b42ff","Type":"ContainerStarted","Data":"6150248accb3c5df7ed73c497250fa69b964d913ab4fc7acca6f96f948bb1706"} Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.912982 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f460180e-2550-4286-ae68-85d752d3a3a3","Type":"ContainerDied","Data":"066e87115eb4924cc383405fe3c3c1094f8563f762e8352616ac581652941daa"} Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.913031 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.913055 4799 scope.go:117] "RemoveContainer" containerID="c7091f62fc43bcecafa9356507579c6eb65acb4d37d20ba7f981b82ad72d9e7d" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.928205 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.82680802 podStartE2EDuration="15.928190094s" podCreationTimestamp="2026-01-21 17:53:20 +0000 UTC" firstStartedPulling="2026-01-21 17:53:22.164364801 +0000 UTC m=+1228.790654834" lastFinishedPulling="2026-01-21 17:53:35.265746885 +0000 UTC m=+1241.892036908" observedRunningTime="2026-01-21 17:53:35.92376806 +0000 UTC m=+1242.550058093" watchObservedRunningTime="2026-01-21 17:53:35.928190094 +0000 UTC m=+1242.554480117" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.945073 4799 scope.go:117] "RemoveContainer" containerID="ee930308a237b29e6aa57ad4b16a4156f73c3528c289a0d170147bb714ff9ea3" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.957975 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-config-data" (OuterVolumeSpecName: "config-data") pod "f460180e-2550-4286-ae68-85d752d3a3a3" (UID: "f460180e-2550-4286-ae68-85d752d3a3a3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.977748 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.977783 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f460180e-2550-4286-ae68-85d752d3a3a3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.987538 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-qd6mw"] Jan 21 17:53:35 crc kubenswrapper[4799]: E0121 17:53:35.988082 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="sg-core" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.988099 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="sg-core" Jan 21 17:53:35 crc kubenswrapper[4799]: E0121 17:53:35.988115 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="ceilometer-notification-agent" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.988122 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="ceilometer-notification-agent" Jan 21 17:53:35 crc kubenswrapper[4799]: E0121 17:53:35.988149 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="ceilometer-central-agent" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.988157 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="ceilometer-central-agent" Jan 21 17:53:35 crc kubenswrapper[4799]: E0121 17:53:35.988184 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="proxy-httpd" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.988190 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="proxy-httpd" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.988384 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="proxy-httpd" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.988400 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="ceilometer-notification-agent" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.988411 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="ceilometer-central-agent" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.988420 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" containerName="sg-core" Jan 21 17:53:35 crc kubenswrapper[4799]: I0121 17:53:35.989179 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-qd6mw" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.011916 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-qd6mw"] Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.019489 4799 scope.go:117] "RemoveContainer" containerID="36846e56ece6d0c4b28e55e88c26357a286f618d878556c6e3c7d54caaadca87" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.233517 4799 scope.go:117] "RemoveContainer" containerID="3cc1bd6b58b490cb895f45e1aedf80494e8157a3d8bce95caef1f4ce0d5d933e" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.237397 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b5b0c77-a492-474c-b2b9-c0c2e17868dc-operator-scripts\") pod \"nova-api-db-create-qd6mw\" (UID: \"0b5b0c77-a492-474c-b2b9-c0c2e17868dc\") " pod="openstack/nova-api-db-create-qd6mw" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.237441 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nf5b6\" (UniqueName: \"kubernetes.io/projected/0b5b0c77-a492-474c-b2b9-c0c2e17868dc-kube-api-access-nf5b6\") pod \"nova-api-db-create-qd6mw\" (UID: \"0b5b0c77-a492-474c-b2b9-c0c2e17868dc\") " pod="openstack/nova-api-db-create-qd6mw" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.274361 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-77bgq"] Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.278922 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-77bgq" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.313402 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-14ef-account-create-update-hnpvc"] Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.315202 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-14ef-account-create-update-hnpvc" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.322801 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.333947 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-77bgq"] Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.339885 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b5b0c77-a492-474c-b2b9-c0c2e17868dc-operator-scripts\") pod \"nova-api-db-create-qd6mw\" (UID: \"0b5b0c77-a492-474c-b2b9-c0c2e17868dc\") " pod="openstack/nova-api-db-create-qd6mw" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.339944 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nf5b6\" (UniqueName: \"kubernetes.io/projected/0b5b0c77-a492-474c-b2b9-c0c2e17868dc-kube-api-access-nf5b6\") pod \"nova-api-db-create-qd6mw\" (UID: \"0b5b0c77-a492-474c-b2b9-c0c2e17868dc\") " pod="openstack/nova-api-db-create-qd6mw" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.340003 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5-operator-scripts\") pod \"nova-api-14ef-account-create-update-hnpvc\" (UID: \"1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5\") " pod="openstack/nova-api-14ef-account-create-update-hnpvc" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.340049 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nprd\" (UniqueName: \"kubernetes.io/projected/f9ee9412-63e5-4fa6-bddd-c362c4241a16-kube-api-access-7nprd\") pod \"nova-cell0-db-create-77bgq\" (UID: \"f9ee9412-63e5-4fa6-bddd-c362c4241a16\") " pod="openstack/nova-cell0-db-create-77bgq" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.340090 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9ee9412-63e5-4fa6-bddd-c362c4241a16-operator-scripts\") pod \"nova-cell0-db-create-77bgq\" (UID: \"f9ee9412-63e5-4fa6-bddd-c362c4241a16\") " pod="openstack/nova-cell0-db-create-77bgq" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.340172 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6wjw\" (UniqueName: \"kubernetes.io/projected/1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5-kube-api-access-v6wjw\") pod \"nova-api-14ef-account-create-update-hnpvc\" (UID: \"1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5\") " pod="openstack/nova-api-14ef-account-create-update-hnpvc" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.340873 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b5b0c77-a492-474c-b2b9-c0c2e17868dc-operator-scripts\") pod \"nova-api-db-create-qd6mw\" (UID: \"0b5b0c77-a492-474c-b2b9-c0c2e17868dc\") " pod="openstack/nova-api-db-create-qd6mw" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.349525 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-14ef-account-create-update-hnpvc"] Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.359315 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-nf5b6\" (UniqueName: \"kubernetes.io/projected/0b5b0c77-a492-474c-b2b9-c0c2e17868dc-kube-api-access-nf5b6\") pod \"nova-api-db-create-qd6mw\" (UID: \"0b5b0c77-a492-474c-b2b9-c0c2e17868dc\") " pod="openstack/nova-api-db-create-qd6mw" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.443996 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nprd\" (UniqueName: \"kubernetes.io/projected/f9ee9412-63e5-4fa6-bddd-c362c4241a16-kube-api-access-7nprd\") pod \"nova-cell0-db-create-77bgq\" (UID: \"f9ee9412-63e5-4fa6-bddd-c362c4241a16\") " pod="openstack/nova-cell0-db-create-77bgq" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.444071 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9ee9412-63e5-4fa6-bddd-c362c4241a16-operator-scripts\") pod \"nova-cell0-db-create-77bgq\" (UID: \"f9ee9412-63e5-4fa6-bddd-c362c4241a16\") " pod="openstack/nova-cell0-db-create-77bgq" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.444117 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6wjw\" (UniqueName: \"kubernetes.io/projected/1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5-kube-api-access-v6wjw\") pod \"nova-api-14ef-account-create-update-hnpvc\" (UID: \"1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5\") " pod="openstack/nova-api-14ef-account-create-update-hnpvc" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.444309 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5-operator-scripts\") pod \"nova-api-14ef-account-create-update-hnpvc\" (UID: \"1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5\") " pod="openstack/nova-api-14ef-account-create-update-hnpvc" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.445276 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5-operator-scripts\") pod \"nova-api-14ef-account-create-update-hnpvc\" (UID: \"1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5\") " pod="openstack/nova-api-14ef-account-create-update-hnpvc" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.447525 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9ee9412-63e5-4fa6-bddd-c362c4241a16-operator-scripts\") pod \"nova-cell0-db-create-77bgq\" (UID: \"f9ee9412-63e5-4fa6-bddd-c362c4241a16\") " pod="openstack/nova-cell0-db-create-77bgq" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.450348 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-4c13-account-create-update-82mwh"] Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.452323 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-4c13-account-create-update-82mwh" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.455740 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.476092 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nprd\" (UniqueName: \"kubernetes.io/projected/f9ee9412-63e5-4fa6-bddd-c362c4241a16-kube-api-access-7nprd\") pod \"nova-cell0-db-create-77bgq\" (UID: \"f9ee9412-63e5-4fa6-bddd-c362c4241a16\") " pod="openstack/nova-cell0-db-create-77bgq" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.486323 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6wjw\" (UniqueName: \"kubernetes.io/projected/1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5-kube-api-access-v6wjw\") pod \"nova-api-14ef-account-create-update-hnpvc\" (UID: \"1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5\") " pod="openstack/nova-api-14ef-account-create-update-hnpvc" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.486577 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-rdss5"] Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.488491 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-rdss5" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.514704 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-rdss5"] Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.528154 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-4c13-account-create-update-82mwh"] Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.539978 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.557165 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.583163 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.605460 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-14ef-account-create-update-hnpvc" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.623260 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-qd6mw" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.624259 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.630516 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.631263 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.648981 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91716d4a-ab28-4634-bdad-f9e1ba454cc3-operator-scripts\") pod \"nova-cell0-4c13-account-create-update-82mwh\" (UID: \"91716d4a-ab28-4634-bdad-f9e1ba454cc3\") " pod="openstack/nova-cell0-4c13-account-create-update-82mwh" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.649746 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cab07ceb-5e7d-400e-92bb-aa4c08af2a56-operator-scripts\") pod \"nova-cell1-db-create-rdss5\" (UID: \"cab07ceb-5e7d-400e-92bb-aa4c08af2a56\") " pod="openstack/nova-cell1-db-create-rdss5" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.649928 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-759k8\" (UniqueName: \"kubernetes.io/projected/91716d4a-ab28-4634-bdad-f9e1ba454cc3-kube-api-access-759k8\") pod \"nova-cell0-4c13-account-create-update-82mwh\" (UID: \"91716d4a-ab28-4634-bdad-f9e1ba454cc3\") " pod="openstack/nova-cell0-4c13-account-create-update-82mwh" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.650014 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npvpf\" (UniqueName: \"kubernetes.io/projected/cab07ceb-5e7d-400e-92bb-aa4c08af2a56-kube-api-access-npvpf\") pod \"nova-cell1-db-create-rdss5\" (UID: \"cab07ceb-5e7d-400e-92bb-aa4c08af2a56\") " pod="openstack/nova-cell1-db-create-rdss5" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.662527 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.686907 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-90ed-account-create-update-fp7tj"] Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.688350 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-90ed-account-create-update-fp7tj" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.696202 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.722598 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-90ed-account-create-update-fp7tj"] Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.751619 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npvpf\" (UniqueName: \"kubernetes.io/projected/cab07ceb-5e7d-400e-92bb-aa4c08af2a56-kube-api-access-npvpf\") pod \"nova-cell1-db-create-rdss5\" (UID: \"cab07ceb-5e7d-400e-92bb-aa4c08af2a56\") " pod="openstack/nova-cell1-db-create-rdss5" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.751732 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.751787 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703e43c0-1694-431a-ba75-a83be15ee561-run-httpd\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.751832 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703e43c0-1694-431a-ba75-a83be15ee561-log-httpd\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.751871 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91716d4a-ab28-4634-bdad-f9e1ba454cc3-operator-scripts\") pod \"nova-cell0-4c13-account-create-update-82mwh\" (UID: \"91716d4a-ab28-4634-bdad-f9e1ba454cc3\") " pod="openstack/nova-cell0-4c13-account-create-update-82mwh" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.752032 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.752135 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-scripts\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.752294 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-config-data\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.752416 4799 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpm62\" (UniqueName: \"kubernetes.io/projected/703e43c0-1694-431a-ba75-a83be15ee561-kube-api-access-gpm62\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.752500 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cab07ceb-5e7d-400e-92bb-aa4c08af2a56-operator-scripts\") pod \"nova-cell1-db-create-rdss5\" (UID: \"cab07ceb-5e7d-400e-92bb-aa4c08af2a56\") " pod="openstack/nova-cell1-db-create-rdss5" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.752546 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-759k8\" (UniqueName: \"kubernetes.io/projected/91716d4a-ab28-4634-bdad-f9e1ba454cc3-kube-api-access-759k8\") pod \"nova-cell0-4c13-account-create-update-82mwh\" (UID: \"91716d4a-ab28-4634-bdad-f9e1ba454cc3\") " pod="openstack/nova-cell0-4c13-account-create-update-82mwh" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.753043 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91716d4a-ab28-4634-bdad-f9e1ba454cc3-operator-scripts\") pod \"nova-cell0-4c13-account-create-update-82mwh\" (UID: \"91716d4a-ab28-4634-bdad-f9e1ba454cc3\") " pod="openstack/nova-cell0-4c13-account-create-update-82mwh" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.753437 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cab07ceb-5e7d-400e-92bb-aa4c08af2a56-operator-scripts\") pod \"nova-cell1-db-create-rdss5\" (UID: \"cab07ceb-5e7d-400e-92bb-aa4c08af2a56\") " pod="openstack/nova-cell1-db-create-rdss5" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.761032 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-77bgq" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.768621 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npvpf\" (UniqueName: \"kubernetes.io/projected/cab07ceb-5e7d-400e-92bb-aa4c08af2a56-kube-api-access-npvpf\") pod \"nova-cell1-db-create-rdss5\" (UID: \"cab07ceb-5e7d-400e-92bb-aa4c08af2a56\") " pod="openstack/nova-cell1-db-create-rdss5" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.768698 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-759k8\" (UniqueName: \"kubernetes.io/projected/91716d4a-ab28-4634-bdad-f9e1ba454cc3-kube-api-access-759k8\") pod \"nova-cell0-4c13-account-create-update-82mwh\" (UID: \"91716d4a-ab28-4634-bdad-f9e1ba454cc3\") " pod="openstack/nova-cell0-4c13-account-create-update-82mwh" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.854245 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfvtr\" (UniqueName: \"kubernetes.io/projected/31e79d66-2da6-47ba-8adf-23e156fa8aae-kube-api-access-nfvtr\") pod \"nova-cell1-90ed-account-create-update-fp7tj\" (UID: \"31e79d66-2da6-47ba-8adf-23e156fa8aae\") " pod="openstack/nova-cell1-90ed-account-create-update-fp7tj" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.854674 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703e43c0-1694-431a-ba75-a83be15ee561-run-httpd\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.854731 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703e43c0-1694-431a-ba75-a83be15ee561-log-httpd\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.854772 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.854793 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-scripts\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.854825 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-config-data\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.854863 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpm62\" (UniqueName: \"kubernetes.io/projected/703e43c0-1694-431a-ba75-a83be15ee561-kube-api-access-gpm62\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.855738 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31e79d66-2da6-47ba-8adf-23e156fa8aae-operator-scripts\") pod \"nova-cell1-90ed-account-create-update-fp7tj\" (UID: \"31e79d66-2da6-47ba-8adf-23e156fa8aae\") " pod="openstack/nova-cell1-90ed-account-create-update-fp7tj" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.855815 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.856662 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703e43c0-1694-431a-ba75-a83be15ee561-run-httpd\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.856726 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703e43c0-1694-431a-ba75-a83be15ee561-log-httpd\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.862466 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-scripts\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.864882 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.868200 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-config-data\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.876239 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.887327 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpm62\" (UniqueName: \"kubernetes.io/projected/703e43c0-1694-431a-ba75-a83be15ee561-kube-api-access-gpm62\") pod \"ceilometer-0\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.923368 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-4c13-account-create-update-82mwh" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.933043 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-749b6794b5-k8rw7" event={"ID":"f7542699-9beb-4966-b1e4-b3c3cb9b42ff","Type":"ContainerStarted","Data":"e067c727dd501de2d12d1434e3ff7d4eba94fc849fc41b848d33686cee00f28e"} Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.946983 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-rdss5" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.958230 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31e79d66-2da6-47ba-8adf-23e156fa8aae-operator-scripts\") pod \"nova-cell1-90ed-account-create-update-fp7tj\" (UID: \"31e79d66-2da6-47ba-8adf-23e156fa8aae\") " pod="openstack/nova-cell1-90ed-account-create-update-fp7tj" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.958348 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfvtr\" (UniqueName: \"kubernetes.io/projected/31e79d66-2da6-47ba-8adf-23e156fa8aae-kube-api-access-nfvtr\") pod \"nova-cell1-90ed-account-create-update-fp7tj\" (UID: \"31e79d66-2da6-47ba-8adf-23e156fa8aae\") " pod="openstack/nova-cell1-90ed-account-create-update-fp7tj" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.960573 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31e79d66-2da6-47ba-8adf-23e156fa8aae-operator-scripts\") pod \"nova-cell1-90ed-account-create-update-fp7tj\" (UID: \"31e79d66-2da6-47ba-8adf-23e156fa8aae\") " pod="openstack/nova-cell1-90ed-account-create-update-fp7tj" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.965411 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:53:36 crc kubenswrapper[4799]: I0121 17:53:36.983720 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfvtr\" (UniqueName: \"kubernetes.io/projected/31e79d66-2da6-47ba-8adf-23e156fa8aae-kube-api-access-nfvtr\") pod \"nova-cell1-90ed-account-create-update-fp7tj\" (UID: \"31e79d66-2da6-47ba-8adf-23e156fa8aae\") " pod="openstack/nova-cell1-90ed-account-create-update-fp7tj" Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.054835 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-90ed-account-create-update-fp7tj" Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.291938 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-14ef-account-create-update-hnpvc"] Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.383350 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-qd6mw"] Jan 21 17:53:37 crc kubenswrapper[4799]: W0121 17:53:37.387426 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b5b0c77_a492_474c_b2b9_c0c2e17868dc.slice/crio-ed77859f5e6ce26dc99cbc1d30beda4a7e503aadb911513e789ae64010558e63 WatchSource:0}: Error finding container ed77859f5e6ce26dc99cbc1d30beda4a7e503aadb911513e789ae64010558e63: Status 404 returned error can't find the container with id ed77859f5e6ce26dc99cbc1d30beda4a7e503aadb911513e789ae64010558e63 Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.549193 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-77bgq"] Jan 21 17:53:37 crc kubenswrapper[4799]: W0121 17:53:37.560026 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9ee9412_63e5_4fa6_bddd_c362c4241a16.slice/crio-0ed9438444e2d4e9a151da4ec2c0c9d32be841a3816e376a6425be284632609d WatchSource:0}: Error finding container 0ed9438444e2d4e9a151da4ec2c0c9d32be841a3816e376a6425be284632609d: Status 404 returned error can't find the container with id 0ed9438444e2d4e9a151da4ec2c0c9d32be841a3816e376a6425be284632609d Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.781526 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-rdss5"] Jan 21 17:53:37 crc kubenswrapper[4799]: W0121 17:53:37.783299 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31e79d66_2da6_47ba_8adf_23e156fa8aae.slice/crio-5a37fbb47f9896ce99f09b6a2d9f15d99042abd0f0ca9e4dac2337e1e3b9488b WatchSource:0}: Error finding container 5a37fbb47f9896ce99f09b6a2d9f15d99042abd0f0ca9e4dac2337e1e3b9488b: Status 404 returned error can't find the container with id 5a37fbb47f9896ce99f09b6a2d9f15d99042abd0f0ca9e4dac2337e1e3b9488b Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.800303 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-90ed-account-create-update-fp7tj"] Jan 21 17:53:37 crc kubenswrapper[4799]: W0121 17:53:37.806185 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcab07ceb_5e7d_400e_92bb_aa4c08af2a56.slice/crio-f833b8eaff9678d577defa6cc9e92485e60bdc81ad4983c4c0f1f67baeb92096 WatchSource:0}: Error finding container f833b8eaff9678d577defa6cc9e92485e60bdc81ad4983c4c0f1f67baeb92096: Status 404 returned error can't find the container with id f833b8eaff9678d577defa6cc9e92485e60bdc81ad4983c4c0f1f67baeb92096 Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.813330 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-4c13-account-create-update-82mwh"] Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.825085 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.943627 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell0-db-create-77bgq" event={"ID":"f9ee9412-63e5-4fa6-bddd-c362c4241a16","Type":"ContainerStarted","Data":"29a33c604a25dc8e6f8ec3af1edd8e8c08051bba6a35abc8ab46823e429c8fde"} Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.943672 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-77bgq" event={"ID":"f9ee9412-63e5-4fa6-bddd-c362c4241a16","Type":"ContainerStarted","Data":"0ed9438444e2d4e9a151da4ec2c0c9d32be841a3816e376a6425be284632609d"} Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.947681 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-90ed-account-create-update-fp7tj" event={"ID":"31e79d66-2da6-47ba-8adf-23e156fa8aae","Type":"ContainerStarted","Data":"5a37fbb47f9896ce99f09b6a2d9f15d99042abd0f0ca9e4dac2337e1e3b9488b"} Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.952341 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703e43c0-1694-431a-ba75-a83be15ee561","Type":"ContainerStarted","Data":"468eca184cd5cb24b6eab4b5af0b9541b50c995d7b08ba8cddfb7eddb86f065f"} Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.963222 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-749b6794b5-k8rw7" event={"ID":"f7542699-9beb-4966-b1e4-b3c3cb9b42ff","Type":"ContainerStarted","Data":"666923b9a6a340da69644f11ed0a6fad3aca989752a1e31a6a1e70e5302c0acc"} Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.963896 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.965129 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.967737 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-14ef-account-create-update-hnpvc" event={"ID":"1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5","Type":"ContainerStarted","Data":"c7a97f1850d2277f1c980d857c97b7f55bd1bbc0f9b2a72bb0474d587c6da282"} Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.967766 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-14ef-account-create-update-hnpvc" event={"ID":"1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5","Type":"ContainerStarted","Data":"41a3d2cc97448038e41113c591b155062f738dfcf622ea67cd55a127b74cf2d2"} Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.971271 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-rdss5" event={"ID":"cab07ceb-5e7d-400e-92bb-aa4c08af2a56","Type":"ContainerStarted","Data":"f833b8eaff9678d577defa6cc9e92485e60bdc81ad4983c4c0f1f67baeb92096"} Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.980719 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-qd6mw" event={"ID":"0b5b0c77-a492-474c-b2b9-c0c2e17868dc","Type":"ContainerStarted","Data":"ec42955d5ea5755cf63b92fe7dfa0ad0e817b52ae5570e901b18cc96850a546d"} Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.980768 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-qd6mw" event={"ID":"0b5b0c77-a492-474c-b2b9-c0c2e17868dc","Type":"ContainerStarted","Data":"ed77859f5e6ce26dc99cbc1d30beda4a7e503aadb911513e789ae64010558e63"} Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.981381 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-cell0-db-create-77bgq" podStartSLOduration=1.981355271 podStartE2EDuration="1.981355271s" podCreationTimestamp="2026-01-21 17:53:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:53:37.963686697 +0000 UTC m=+1244.589976720" watchObservedRunningTime="2026-01-21 17:53:37.981355271 +0000 UTC m=+1244.607645294" Jan 21 17:53:37 crc kubenswrapper[4799]: I0121 17:53:37.984311 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4c13-account-create-update-82mwh" event={"ID":"91716d4a-ab28-4634-bdad-f9e1ba454cc3","Type":"ContainerStarted","Data":"6bf9dde760e7b7c4a82080b1930fad56a30ad05207bd95a232132ab77b9e4478"} Jan 21 17:53:38 crc kubenswrapper[4799]: I0121 17:53:38.006056 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-749b6794b5-k8rw7" podStartSLOduration=8.006029441 podStartE2EDuration="8.006029441s" podCreationTimestamp="2026-01-21 17:53:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:53:37.983968424 +0000 UTC m=+1244.610258447" watchObservedRunningTime="2026-01-21 17:53:38.006029441 +0000 UTC m=+1244.632319474" Jan 21 17:53:38 crc kubenswrapper[4799]: I0121 17:53:38.025758 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-qd6mw" podStartSLOduration=3.025734152 podStartE2EDuration="3.025734152s" podCreationTimestamp="2026-01-21 17:53:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:53:37.997311317 +0000 UTC m=+1244.623601360" watchObservedRunningTime="2026-01-21 17:53:38.025734152 +0000 UTC m=+1244.652024175" Jan 21 17:53:38 crc kubenswrapper[4799]: I0121 17:53:38.040207 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-14ef-account-create-update-hnpvc" podStartSLOduration=2.040182996 podStartE2EDuration="2.040182996s" podCreationTimestamp="2026-01-21 17:53:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:53:38.020441184 +0000 UTC m=+1244.646731217" watchObservedRunningTime="2026-01-21 17:53:38.040182996 +0000 UTC m=+1244.666473019" Jan 21 17:53:38 crc kubenswrapper[4799]: I0121 17:53:38.215210 4799 scope.go:117] "RemoveContainer" containerID="d3aaffd6bf3f96c9b7d3914da4516463321849f010304db2511463d6ce079929" Jan 21 17:53:38 crc kubenswrapper[4799]: I0121 17:53:38.273896 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f460180e-2550-4286-ae68-85d752d3a3a3" path="/var/lib/kubelet/pods/f460180e-2550-4286-ae68-85d752d3a3a3/volumes" Jan 21 17:53:38 crc kubenswrapper[4799]: I0121 17:53:38.417936 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:38 crc kubenswrapper[4799]: I0121 17:53:38.998176 4799 generic.go:334] "Generic (PLEG): container finished" podID="cab07ceb-5e7d-400e-92bb-aa4c08af2a56" containerID="82d399d63d4a396aa21c315b901b81b5a177ffdc529792cc3bf2dcca8d80591b" exitCode=0 Jan 21 17:53:38 crc kubenswrapper[4799]: I0121 17:53:38.998277 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-rdss5" 
event={"ID":"cab07ceb-5e7d-400e-92bb-aa4c08af2a56","Type":"ContainerDied","Data":"82d399d63d4a396aa21c315b901b81b5a177ffdc529792cc3bf2dcca8d80591b"} Jan 21 17:53:39 crc kubenswrapper[4799]: I0121 17:53:39.002294 4799 generic.go:334] "Generic (PLEG): container finished" podID="0b5b0c77-a492-474c-b2b9-c0c2e17868dc" containerID="ec42955d5ea5755cf63b92fe7dfa0ad0e817b52ae5570e901b18cc96850a546d" exitCode=0 Jan 21 17:53:39 crc kubenswrapper[4799]: I0121 17:53:39.002397 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-qd6mw" event={"ID":"0b5b0c77-a492-474c-b2b9-c0c2e17868dc","Type":"ContainerDied","Data":"ec42955d5ea5755cf63b92fe7dfa0ad0e817b52ae5570e901b18cc96850a546d"} Jan 21 17:53:39 crc kubenswrapper[4799]: I0121 17:53:39.004714 4799 generic.go:334] "Generic (PLEG): container finished" podID="91716d4a-ab28-4634-bdad-f9e1ba454cc3" containerID="2b7ebc821747d7d4e3ea7978e64fcba03894c44c590c4ca7dc476e418d9a3e9c" exitCode=0 Jan 21 17:53:39 crc kubenswrapper[4799]: I0121 17:53:39.004780 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4c13-account-create-update-82mwh" event={"ID":"91716d4a-ab28-4634-bdad-f9e1ba454cc3","Type":"ContainerDied","Data":"2b7ebc821747d7d4e3ea7978e64fcba03894c44c590c4ca7dc476e418d9a3e9c"} Jan 21 17:53:39 crc kubenswrapper[4799]: I0121 17:53:39.007038 4799 generic.go:334] "Generic (PLEG): container finished" podID="f9ee9412-63e5-4fa6-bddd-c362c4241a16" containerID="29a33c604a25dc8e6f8ec3af1edd8e8c08051bba6a35abc8ab46823e429c8fde" exitCode=0 Jan 21 17:53:39 crc kubenswrapper[4799]: I0121 17:53:39.007098 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-77bgq" event={"ID":"f9ee9412-63e5-4fa6-bddd-c362c4241a16","Type":"ContainerDied","Data":"29a33c604a25dc8e6f8ec3af1edd8e8c08051bba6a35abc8ab46823e429c8fde"} Jan 21 17:53:39 crc kubenswrapper[4799]: I0121 17:53:39.009264 4799 generic.go:334] "Generic (PLEG): container finished" podID="31e79d66-2da6-47ba-8adf-23e156fa8aae" containerID="a1a675b49885bd9a1d3215e2ac9b63e051009f55fd5c4a06eb329fd67e09757e" exitCode=0 Jan 21 17:53:39 crc kubenswrapper[4799]: I0121 17:53:39.009325 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-90ed-account-create-update-fp7tj" event={"ID":"31e79d66-2da6-47ba-8adf-23e156fa8aae","Type":"ContainerDied","Data":"a1a675b49885bd9a1d3215e2ac9b63e051009f55fd5c4a06eb329fd67e09757e"} Jan 21 17:53:39 crc kubenswrapper[4799]: I0121 17:53:39.011975 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703e43c0-1694-431a-ba75-a83be15ee561","Type":"ContainerStarted","Data":"6a669c8913771d2834db14d9653e31001cb4f95bd5dc7b63963f2b041b67eb55"} Jan 21 17:53:39 crc kubenswrapper[4799]: I0121 17:53:39.012014 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703e43c0-1694-431a-ba75-a83be15ee561","Type":"ContainerStarted","Data":"fa3e82f6a3cbd4791c27a8a2a5269fff2955afa1f115e6718e1f5b8bfc1f24ac"} Jan 21 17:53:39 crc kubenswrapper[4799]: I0121 17:53:39.014087 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"e3555046-24d9-4700-bdb8-0a09c35f651a","Type":"ContainerStarted","Data":"d54c9080e88caa75a6ed11cd37e4d9f1b794c81d3c3beb94fa50a91e66373329"} Jan 21 17:53:39 crc kubenswrapper[4799]: I0121 17:53:39.043243 4799 generic.go:334] "Generic (PLEG): container finished" podID="1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5" 
containerID="c7a97f1850d2277f1c980d857c97b7f55bd1bbc0f9b2a72bb0474d587c6da282" exitCode=0 Jan 21 17:53:39 crc kubenswrapper[4799]: I0121 17:53:39.044127 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-14ef-account-create-update-hnpvc" event={"ID":"1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5","Type":"ContainerDied","Data":"c7a97f1850d2277f1c980d857c97b7f55bd1bbc0f9b2a72bb0474d587c6da282"} Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.031021 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-14ef-account-create-update-hnpvc" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.043224 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-90ed-account-create-update-fp7tj" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.044279 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-qd6mw" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.057978 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-4c13-account-create-update-82mwh" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.072688 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-rdss5" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.072923 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-14ef-account-create-update-hnpvc" event={"ID":"1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5","Type":"ContainerDied","Data":"41a3d2cc97448038e41113c591b155062f738dfcf622ea67cd55a127b74cf2d2"} Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.072959 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41a3d2cc97448038e41113c591b155062f738dfcf622ea67cd55a127b74cf2d2" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.073018 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-14ef-account-create-update-hnpvc" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.083491 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-rdss5" event={"ID":"cab07ceb-5e7d-400e-92bb-aa4c08af2a56","Type":"ContainerDied","Data":"f833b8eaff9678d577defa6cc9e92485e60bdc81ad4983c4c0f1f67baeb92096"} Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.083556 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f833b8eaff9678d577defa6cc9e92485e60bdc81ad4983c4c0f1f67baeb92096" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.083623 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-rdss5" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.084715 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-77bgq" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.086792 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-qd6mw" event={"ID":"0b5b0c77-a492-474c-b2b9-c0c2e17868dc","Type":"ContainerDied","Data":"ed77859f5e6ce26dc99cbc1d30beda4a7e503aadb911513e789ae64010558e63"} Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.086838 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed77859f5e6ce26dc99cbc1d30beda4a7e503aadb911513e789ae64010558e63" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.086908 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-qd6mw" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.088628 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4c13-account-create-update-82mwh" event={"ID":"91716d4a-ab28-4634-bdad-f9e1ba454cc3","Type":"ContainerDied","Data":"6bf9dde760e7b7c4a82080b1930fad56a30ad05207bd95a232132ab77b9e4478"} Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.088654 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6bf9dde760e7b7c4a82080b1930fad56a30ad05207bd95a232132ab77b9e4478" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.088692 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-4c13-account-create-update-82mwh" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.090930 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-77bgq" event={"ID":"f9ee9412-63e5-4fa6-bddd-c362c4241a16","Type":"ContainerDied","Data":"0ed9438444e2d4e9a151da4ec2c0c9d32be841a3816e376a6425be284632609d"} Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.090951 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ed9438444e2d4e9a151da4ec2c0c9d32be841a3816e376a6425be284632609d" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.090996 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-77bgq" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.092737 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-90ed-account-create-update-fp7tj" event={"ID":"31e79d66-2da6-47ba-8adf-23e156fa8aae","Type":"ContainerDied","Data":"5a37fbb47f9896ce99f09b6a2d9f15d99042abd0f0ca9e4dac2337e1e3b9488b"} Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.092759 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a37fbb47f9896ce99f09b6a2d9f15d99042abd0f0ca9e4dac2337e1e3b9488b" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.092798 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-90ed-account-create-update-fp7tj" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.100006 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b5b0c77-a492-474c-b2b9-c0c2e17868dc-operator-scripts\") pod \"0b5b0c77-a492-474c-b2b9-c0c2e17868dc\" (UID: \"0b5b0c77-a492-474c-b2b9-c0c2e17868dc\") " Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.100082 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9ee9412-63e5-4fa6-bddd-c362c4241a16-operator-scripts\") pod \"f9ee9412-63e5-4fa6-bddd-c362c4241a16\" (UID: \"f9ee9412-63e5-4fa6-bddd-c362c4241a16\") " Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.100238 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nf5b6\" (UniqueName: \"kubernetes.io/projected/0b5b0c77-a492-474c-b2b9-c0c2e17868dc-kube-api-access-nf5b6\") pod \"0b5b0c77-a492-474c-b2b9-c0c2e17868dc\" (UID: \"0b5b0c77-a492-474c-b2b9-c0c2e17868dc\") " Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.100441 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5-operator-scripts\") pod \"1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5\" (UID: \"1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5\") " Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.100475 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91716d4a-ab28-4634-bdad-f9e1ba454cc3-operator-scripts\") pod \"91716d4a-ab28-4634-bdad-f9e1ba454cc3\" (UID: \"91716d4a-ab28-4634-bdad-f9e1ba454cc3\") " Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.100519 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npvpf\" (UniqueName: \"kubernetes.io/projected/cab07ceb-5e7d-400e-92bb-aa4c08af2a56-kube-api-access-npvpf\") pod \"cab07ceb-5e7d-400e-92bb-aa4c08af2a56\" (UID: \"cab07ceb-5e7d-400e-92bb-aa4c08af2a56\") " Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.100601 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6wjw\" (UniqueName: \"kubernetes.io/projected/1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5-kube-api-access-v6wjw\") pod \"1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5\" (UID: \"1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5\") " Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.100672 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31e79d66-2da6-47ba-8adf-23e156fa8aae-operator-scripts\") pod \"31e79d66-2da6-47ba-8adf-23e156fa8aae\" (UID: \"31e79d66-2da6-47ba-8adf-23e156fa8aae\") " Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.100740 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-759k8\" (UniqueName: \"kubernetes.io/projected/91716d4a-ab28-4634-bdad-f9e1ba454cc3-kube-api-access-759k8\") pod \"91716d4a-ab28-4634-bdad-f9e1ba454cc3\" (UID: \"91716d4a-ab28-4634-bdad-f9e1ba454cc3\") " Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.100780 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7nprd\" (UniqueName: 
\"kubernetes.io/projected/f9ee9412-63e5-4fa6-bddd-c362c4241a16-kube-api-access-7nprd\") pod \"f9ee9412-63e5-4fa6-bddd-c362c4241a16\" (UID: \"f9ee9412-63e5-4fa6-bddd-c362c4241a16\") " Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.100843 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfvtr\" (UniqueName: \"kubernetes.io/projected/31e79d66-2da6-47ba-8adf-23e156fa8aae-kube-api-access-nfvtr\") pod \"31e79d66-2da6-47ba-8adf-23e156fa8aae\" (UID: \"31e79d66-2da6-47ba-8adf-23e156fa8aae\") " Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.100868 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cab07ceb-5e7d-400e-92bb-aa4c08af2a56-operator-scripts\") pod \"cab07ceb-5e7d-400e-92bb-aa4c08af2a56\" (UID: \"cab07ceb-5e7d-400e-92bb-aa4c08af2a56\") " Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.103332 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cab07ceb-5e7d-400e-92bb-aa4c08af2a56-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cab07ceb-5e7d-400e-92bb-aa4c08af2a56" (UID: "cab07ceb-5e7d-400e-92bb-aa4c08af2a56"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.108296 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5" (UID: "1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.108316 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31e79d66-2da6-47ba-8adf-23e156fa8aae-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "31e79d66-2da6-47ba-8adf-23e156fa8aae" (UID: "31e79d66-2da6-47ba-8adf-23e156fa8aae"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.108796 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b5b0c77-a492-474c-b2b9-c0c2e17868dc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0b5b0c77-a492-474c-b2b9-c0c2e17868dc" (UID: "0b5b0c77-a492-474c-b2b9-c0c2e17868dc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.110203 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9ee9412-63e5-4fa6-bddd-c362c4241a16-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f9ee9412-63e5-4fa6-bddd-c362c4241a16" (UID: "f9ee9412-63e5-4fa6-bddd-c362c4241a16"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.111536 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91716d4a-ab28-4634-bdad-f9e1ba454cc3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "91716d4a-ab28-4634-bdad-f9e1ba454cc3" (UID: "91716d4a-ab28-4634-bdad-f9e1ba454cc3"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.113474 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91716d4a-ab28-4634-bdad-f9e1ba454cc3-kube-api-access-759k8" (OuterVolumeSpecName: "kube-api-access-759k8") pod "91716d4a-ab28-4634-bdad-f9e1ba454cc3" (UID: "91716d4a-ab28-4634-bdad-f9e1ba454cc3"). InnerVolumeSpecName "kube-api-access-759k8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.116898 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31e79d66-2da6-47ba-8adf-23e156fa8aae-kube-api-access-nfvtr" (OuterVolumeSpecName: "kube-api-access-nfvtr") pod "31e79d66-2da6-47ba-8adf-23e156fa8aae" (UID: "31e79d66-2da6-47ba-8adf-23e156fa8aae"). InnerVolumeSpecName "kube-api-access-nfvtr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.118051 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b5b0c77-a492-474c-b2b9-c0c2e17868dc-kube-api-access-nf5b6" (OuterVolumeSpecName: "kube-api-access-nf5b6") pod "0b5b0c77-a492-474c-b2b9-c0c2e17868dc" (UID: "0b5b0c77-a492-474c-b2b9-c0c2e17868dc"). InnerVolumeSpecName "kube-api-access-nf5b6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.120544 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cab07ceb-5e7d-400e-92bb-aa4c08af2a56-kube-api-access-npvpf" (OuterVolumeSpecName: "kube-api-access-npvpf") pod "cab07ceb-5e7d-400e-92bb-aa4c08af2a56" (UID: "cab07ceb-5e7d-400e-92bb-aa4c08af2a56"). InnerVolumeSpecName "kube-api-access-npvpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.138427 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5-kube-api-access-v6wjw" (OuterVolumeSpecName: "kube-api-access-v6wjw") pod "1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5" (UID: "1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5"). InnerVolumeSpecName "kube-api-access-v6wjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.148424 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9ee9412-63e5-4fa6-bddd-c362c4241a16-kube-api-access-7nprd" (OuterVolumeSpecName: "kube-api-access-7nprd") pod "f9ee9412-63e5-4fa6-bddd-c362c4241a16" (UID: "f9ee9412-63e5-4fa6-bddd-c362c4241a16"). InnerVolumeSpecName "kube-api-access-7nprd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.203818 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfvtr\" (UniqueName: \"kubernetes.io/projected/31e79d66-2da6-47ba-8adf-23e156fa8aae-kube-api-access-nfvtr\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.203877 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cab07ceb-5e7d-400e-92bb-aa4c08af2a56-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.203892 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b5b0c77-a492-474c-b2b9-c0c2e17868dc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.203904 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f9ee9412-63e5-4fa6-bddd-c362c4241a16-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.203916 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nf5b6\" (UniqueName: \"kubernetes.io/projected/0b5b0c77-a492-474c-b2b9-c0c2e17868dc-kube-api-access-nf5b6\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.203929 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.203939 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91716d4a-ab28-4634-bdad-f9e1ba454cc3-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.203957 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npvpf\" (UniqueName: \"kubernetes.io/projected/cab07ceb-5e7d-400e-92bb-aa4c08af2a56-kube-api-access-npvpf\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.203969 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6wjw\" (UniqueName: \"kubernetes.io/projected/1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5-kube-api-access-v6wjw\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.203980 4799 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31e79d66-2da6-47ba-8adf-23e156fa8aae-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.203993 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-759k8\" (UniqueName: \"kubernetes.io/projected/91716d4a-ab28-4634-bdad-f9e1ba454cc3-kube-api-access-759k8\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:41 crc kubenswrapper[4799]: I0121 17:53:41.204004 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7nprd\" (UniqueName: \"kubernetes.io/projected/f9ee9412-63e5-4fa6-bddd-c362c4241a16-kube-api-access-7nprd\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:42 crc kubenswrapper[4799]: I0121 17:53:42.130823 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"703e43c0-1694-431a-ba75-a83be15ee561","Type":"ContainerStarted","Data":"821d1a9ad8c6571d720de6689ca9e31ac2fb118c4722604d3604d22a0d87a7c0"} Jan 21 17:53:43 crc kubenswrapper[4799]: I0121 17:53:43.364736 4799 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod8c8a46b8-4c1b-413d-a085-fa3994505174"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod8c8a46b8-4c1b-413d-a085-fa3994505174] : Timed out while waiting for systemd to remove kubepods-besteffort-pod8c8a46b8_4c1b_413d_a085_fa3994505174.slice" Jan 21 17:53:43 crc kubenswrapper[4799]: I0121 17:53:43.818595 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Jan 21 17:53:43 crc kubenswrapper[4799]: I0121 17:53:43.818649 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Jan 21 17:53:43 crc kubenswrapper[4799]: E0121 17:53:43.862261 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: EOF, stdout: , stderr: , exit code -1" containerID="d54c9080e88caa75a6ed11cd37e4d9f1b794c81d3c3beb94fa50a91e66373329" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Jan 21 17:53:43 crc kubenswrapper[4799]: E0121 17:53:43.863091 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d54c9080e88caa75a6ed11cd37e4d9f1b794c81d3c3beb94fa50a91e66373329 is running failed: container process not found" containerID="d54c9080e88caa75a6ed11cd37e4d9f1b794c81d3c3beb94fa50a91e66373329" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Jan 21 17:53:43 crc kubenswrapper[4799]: E0121 17:53:43.863631 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d54c9080e88caa75a6ed11cd37e4d9f1b794c81d3c3beb94fa50a91e66373329 is running failed: container process not found" containerID="d54c9080e88caa75a6ed11cd37e4d9f1b794c81d3c3beb94fa50a91e66373329" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Jan 21 17:53:43 crc kubenswrapper[4799]: E0121 17:53:43.863711 4799 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d54c9080e88caa75a6ed11cd37e4d9f1b794c81d3c3beb94fa50a91e66373329 is running failed: container process not found" probeType="Startup" pod="openstack/watcher-decision-engine-0" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerName="watcher-decision-engine" Jan 21 17:53:44 crc kubenswrapper[4799]: I0121 17:53:44.152685 4799 generic.go:334] "Generic (PLEG): container finished" podID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerID="d54c9080e88caa75a6ed11cd37e4d9f1b794c81d3c3beb94fa50a91e66373329" exitCode=1 Jan 21 17:53:44 crc kubenswrapper[4799]: I0121 17:53:44.152734 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"e3555046-24d9-4700-bdb8-0a09c35f651a","Type":"ContainerDied","Data":"d54c9080e88caa75a6ed11cd37e4d9f1b794c81d3c3beb94fa50a91e66373329"} Jan 21 17:53:44 crc kubenswrapper[4799]: I0121 17:53:44.152811 4799 scope.go:117] "RemoveContainer" containerID="d3aaffd6bf3f96c9b7d3914da4516463321849f010304db2511463d6ce079929" Jan 21 17:53:44 crc kubenswrapper[4799]: I0121 17:53:44.155002 4799 scope.go:117] "RemoveContainer" 
containerID="d54c9080e88caa75a6ed11cd37e4d9f1b794c81d3c3beb94fa50a91e66373329" Jan 21 17:53:44 crc kubenswrapper[4799]: E0121 17:53:44.155662 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(e3555046-24d9-4700-bdb8-0a09c35f651a)\"" pod="openstack/watcher-decision-engine-0" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" Jan 21 17:53:45 crc kubenswrapper[4799]: I0121 17:53:45.637445 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:45 crc kubenswrapper[4799]: I0121 17:53:45.643969 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-749b6794b5-k8rw7" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.178391 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="703e43c0-1694-431a-ba75-a83be15ee561" containerName="ceilometer-central-agent" containerID="cri-o://fa3e82f6a3cbd4791c27a8a2a5269fff2955afa1f115e6718e1f5b8bfc1f24ac" gracePeriod=30 Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.179232 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="703e43c0-1694-431a-ba75-a83be15ee561" containerName="sg-core" containerID="cri-o://821d1a9ad8c6571d720de6689ca9e31ac2fb118c4722604d3604d22a0d87a7c0" gracePeriod=30 Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.179239 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="703e43c0-1694-431a-ba75-a83be15ee561" containerName="ceilometer-notification-agent" containerID="cri-o://6a669c8913771d2834db14d9653e31001cb4f95bd5dc7b63963f2b041b67eb55" gracePeriod=30 Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.179256 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="703e43c0-1694-431a-ba75-a83be15ee561" containerName="proxy-httpd" containerID="cri-o://4eed2c1303b0f4ca4bc7e9c4a47122e36bf84503773b604aa7166cc9574ea291" gracePeriod=30 Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.179327 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703e43c0-1694-431a-ba75-a83be15ee561","Type":"ContainerStarted","Data":"4eed2c1303b0f4ca4bc7e9c4a47122e36bf84503773b604aa7166cc9574ea291"} Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.179651 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.220542 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.5607996379999998 podStartE2EDuration="10.22051438s" podCreationTimestamp="2026-01-21 17:53:36 +0000 UTC" firstStartedPulling="2026-01-21 17:53:37.840744808 +0000 UTC m=+1244.467034831" lastFinishedPulling="2026-01-21 17:53:45.50045953 +0000 UTC m=+1252.126749573" observedRunningTime="2026-01-21 17:53:46.202207248 +0000 UTC m=+1252.828497281" watchObservedRunningTime="2026-01-21 17:53:46.22051438 +0000 UTC m=+1252.846804413" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.823798 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-w4p6b"] Jan 21 17:53:46 crc 
kubenswrapper[4799]: E0121 17:53:46.824543 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5" containerName="mariadb-account-create-update" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.824557 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5" containerName="mariadb-account-create-update" Jan 21 17:53:46 crc kubenswrapper[4799]: E0121 17:53:46.824567 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91716d4a-ab28-4634-bdad-f9e1ba454cc3" containerName="mariadb-account-create-update" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.824573 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="91716d4a-ab28-4634-bdad-f9e1ba454cc3" containerName="mariadb-account-create-update" Jan 21 17:53:46 crc kubenswrapper[4799]: E0121 17:53:46.824584 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31e79d66-2da6-47ba-8adf-23e156fa8aae" containerName="mariadb-account-create-update" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.824591 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="31e79d66-2da6-47ba-8adf-23e156fa8aae" containerName="mariadb-account-create-update" Jan 21 17:53:46 crc kubenswrapper[4799]: E0121 17:53:46.824597 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b5b0c77-a492-474c-b2b9-c0c2e17868dc" containerName="mariadb-database-create" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.824603 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b5b0c77-a492-474c-b2b9-c0c2e17868dc" containerName="mariadb-database-create" Jan 21 17:53:46 crc kubenswrapper[4799]: E0121 17:53:46.824611 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9ee9412-63e5-4fa6-bddd-c362c4241a16" containerName="mariadb-database-create" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.824616 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9ee9412-63e5-4fa6-bddd-c362c4241a16" containerName="mariadb-database-create" Jan 21 17:53:46 crc kubenswrapper[4799]: E0121 17:53:46.824630 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cab07ceb-5e7d-400e-92bb-aa4c08af2a56" containerName="mariadb-database-create" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.824636 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cab07ceb-5e7d-400e-92bb-aa4c08af2a56" containerName="mariadb-database-create" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.824834 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="31e79d66-2da6-47ba-8adf-23e156fa8aae" containerName="mariadb-account-create-update" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.824854 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="cab07ceb-5e7d-400e-92bb-aa4c08af2a56" containerName="mariadb-database-create" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.824863 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="91716d4a-ab28-4634-bdad-f9e1ba454cc3" containerName="mariadb-account-create-update" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.824874 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9ee9412-63e5-4fa6-bddd-c362c4241a16" containerName="mariadb-database-create" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.824885 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b5b0c77-a492-474c-b2b9-c0c2e17868dc" 
containerName="mariadb-database-create" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.824892 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5" containerName="mariadb-account-create-update" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.825634 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-w4p6b" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.831096 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.831282 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.831430 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-xzc95" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.844649 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-w4p6b"] Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.964934 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-w4p6b\" (UID: \"310c250a-8e8a-402c-84d6-5fb50340d73d\") " pod="openstack/nova-cell0-conductor-db-sync-w4p6b" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.964992 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf7fj\" (UniqueName: \"kubernetes.io/projected/310c250a-8e8a-402c-84d6-5fb50340d73d-kube-api-access-lf7fj\") pod \"nova-cell0-conductor-db-sync-w4p6b\" (UID: \"310c250a-8e8a-402c-84d6-5fb50340d73d\") " pod="openstack/nova-cell0-conductor-db-sync-w4p6b" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.965202 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-config-data\") pod \"nova-cell0-conductor-db-sync-w4p6b\" (UID: \"310c250a-8e8a-402c-84d6-5fb50340d73d\") " pod="openstack/nova-cell0-conductor-db-sync-w4p6b" Jan 21 17:53:46 crc kubenswrapper[4799]: I0121 17:53:46.965313 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-scripts\") pod \"nova-cell0-conductor-db-sync-w4p6b\" (UID: \"310c250a-8e8a-402c-84d6-5fb50340d73d\") " pod="openstack/nova-cell0-conductor-db-sync-w4p6b" Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.067257 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-config-data\") pod \"nova-cell0-conductor-db-sync-w4p6b\" (UID: \"310c250a-8e8a-402c-84d6-5fb50340d73d\") " pod="openstack/nova-cell0-conductor-db-sync-w4p6b" Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.067411 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-scripts\") pod \"nova-cell0-conductor-db-sync-w4p6b\" (UID: \"310c250a-8e8a-402c-84d6-5fb50340d73d\") " 
pod="openstack/nova-cell0-conductor-db-sync-w4p6b" Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.067532 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-w4p6b\" (UID: \"310c250a-8e8a-402c-84d6-5fb50340d73d\") " pod="openstack/nova-cell0-conductor-db-sync-w4p6b" Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.067576 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf7fj\" (UniqueName: \"kubernetes.io/projected/310c250a-8e8a-402c-84d6-5fb50340d73d-kube-api-access-lf7fj\") pod \"nova-cell0-conductor-db-sync-w4p6b\" (UID: \"310c250a-8e8a-402c-84d6-5fb50340d73d\") " pod="openstack/nova-cell0-conductor-db-sync-w4p6b" Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.073265 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-config-data\") pod \"nova-cell0-conductor-db-sync-w4p6b\" (UID: \"310c250a-8e8a-402c-84d6-5fb50340d73d\") " pod="openstack/nova-cell0-conductor-db-sync-w4p6b" Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.073258 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-w4p6b\" (UID: \"310c250a-8e8a-402c-84d6-5fb50340d73d\") " pod="openstack/nova-cell0-conductor-db-sync-w4p6b" Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.081828 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-scripts\") pod \"nova-cell0-conductor-db-sync-w4p6b\" (UID: \"310c250a-8e8a-402c-84d6-5fb50340d73d\") " pod="openstack/nova-cell0-conductor-db-sync-w4p6b" Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.089670 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf7fj\" (UniqueName: \"kubernetes.io/projected/310c250a-8e8a-402c-84d6-5fb50340d73d-kube-api-access-lf7fj\") pod \"nova-cell0-conductor-db-sync-w4p6b\" (UID: \"310c250a-8e8a-402c-84d6-5fb50340d73d\") " pod="openstack/nova-cell0-conductor-db-sync-w4p6b" Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.156176 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-w4p6b" Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.193886 4799 generic.go:334] "Generic (PLEG): container finished" podID="703e43c0-1694-431a-ba75-a83be15ee561" containerID="4eed2c1303b0f4ca4bc7e9c4a47122e36bf84503773b604aa7166cc9574ea291" exitCode=0 Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.193919 4799 generic.go:334] "Generic (PLEG): container finished" podID="703e43c0-1694-431a-ba75-a83be15ee561" containerID="821d1a9ad8c6571d720de6689ca9e31ac2fb118c4722604d3604d22a0d87a7c0" exitCode=2 Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.193943 4799 generic.go:334] "Generic (PLEG): container finished" podID="703e43c0-1694-431a-ba75-a83be15ee561" containerID="6a669c8913771d2834db14d9653e31001cb4f95bd5dc7b63963f2b041b67eb55" exitCode=0 Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.193950 4799 generic.go:334] "Generic (PLEG): container finished" podID="703e43c0-1694-431a-ba75-a83be15ee561" containerID="fa3e82f6a3cbd4791c27a8a2a5269fff2955afa1f115e6718e1f5b8bfc1f24ac" exitCode=0 Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.193962 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703e43c0-1694-431a-ba75-a83be15ee561","Type":"ContainerDied","Data":"4eed2c1303b0f4ca4bc7e9c4a47122e36bf84503773b604aa7166cc9574ea291"} Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.194025 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703e43c0-1694-431a-ba75-a83be15ee561","Type":"ContainerDied","Data":"821d1a9ad8c6571d720de6689ca9e31ac2fb118c4722604d3604d22a0d87a7c0"} Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.194044 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703e43c0-1694-431a-ba75-a83be15ee561","Type":"ContainerDied","Data":"6a669c8913771d2834db14d9653e31001cb4f95bd5dc7b63963f2b041b67eb55"} Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.194056 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703e43c0-1694-431a-ba75-a83be15ee561","Type":"ContainerDied","Data":"fa3e82f6a3cbd4791c27a8a2a5269fff2955afa1f115e6718e1f5b8bfc1f24ac"} Jan 21 17:53:47 crc kubenswrapper[4799]: I0121 17:53:47.691529 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-w4p6b"] Jan 21 17:53:47 crc kubenswrapper[4799]: W0121 17:53:47.693696 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod310c250a_8e8a_402c_84d6_5fb50340d73d.slice/crio-69e31d820f031e7077c0cfd947bed5936546fb14a303963d7a88a675da781c50 WatchSource:0}: Error finding container 69e31d820f031e7077c0cfd947bed5936546fb14a303963d7a88a675da781c50: Status 404 returned error can't find the container with id 69e31d820f031e7077c0cfd947bed5936546fb14a303963d7a88a675da781c50 Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.064378 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.221373 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.222041 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"703e43c0-1694-431a-ba75-a83be15ee561","Type":"ContainerDied","Data":"468eca184cd5cb24b6eab4b5af0b9541b50c995d7b08ba8cddfb7eddb86f065f"} Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.222102 4799 scope.go:117] "RemoveContainer" containerID="4eed2c1303b0f4ca4bc7e9c4a47122e36bf84503773b604aa7166cc9574ea291" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.222367 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-config-data\") pod \"703e43c0-1694-431a-ba75-a83be15ee561\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.222486 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703e43c0-1694-431a-ba75-a83be15ee561-run-httpd\") pod \"703e43c0-1694-431a-ba75-a83be15ee561\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.222558 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703e43c0-1694-431a-ba75-a83be15ee561-log-httpd\") pod \"703e43c0-1694-431a-ba75-a83be15ee561\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.222596 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpm62\" (UniqueName: \"kubernetes.io/projected/703e43c0-1694-431a-ba75-a83be15ee561-kube-api-access-gpm62\") pod \"703e43c0-1694-431a-ba75-a83be15ee561\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.222653 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-combined-ca-bundle\") pod \"703e43c0-1694-431a-ba75-a83be15ee561\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.222692 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-sg-core-conf-yaml\") pod \"703e43c0-1694-431a-ba75-a83be15ee561\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.222731 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-scripts\") pod \"703e43c0-1694-431a-ba75-a83be15ee561\" (UID: \"703e43c0-1694-431a-ba75-a83be15ee561\") " Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.224048 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/703e43c0-1694-431a-ba75-a83be15ee561-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "703e43c0-1694-431a-ba75-a83be15ee561" (UID: "703e43c0-1694-431a-ba75-a83be15ee561"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.224080 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/703e43c0-1694-431a-ba75-a83be15ee561-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "703e43c0-1694-431a-ba75-a83be15ee561" (UID: "703e43c0-1694-431a-ba75-a83be15ee561"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.224854 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-w4p6b" event={"ID":"310c250a-8e8a-402c-84d6-5fb50340d73d","Type":"ContainerStarted","Data":"69e31d820f031e7077c0cfd947bed5936546fb14a303963d7a88a675da781c50"} Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.232744 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-scripts" (OuterVolumeSpecName: "scripts") pod "703e43c0-1694-431a-ba75-a83be15ee561" (UID: "703e43c0-1694-431a-ba75-a83be15ee561"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.232842 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/703e43c0-1694-431a-ba75-a83be15ee561-kube-api-access-gpm62" (OuterVolumeSpecName: "kube-api-access-gpm62") pod "703e43c0-1694-431a-ba75-a83be15ee561" (UID: "703e43c0-1694-431a-ba75-a83be15ee561"). InnerVolumeSpecName "kube-api-access-gpm62". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.272990 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "703e43c0-1694-431a-ba75-a83be15ee561" (UID: "703e43c0-1694-431a-ba75-a83be15ee561"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.302447 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "703e43c0-1694-431a-ba75-a83be15ee561" (UID: "703e43c0-1694-431a-ba75-a83be15ee561"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.325878 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703e43c0-1694-431a-ba75-a83be15ee561-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.325926 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/703e43c0-1694-431a-ba75-a83be15ee561-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.325980 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpm62\" (UniqueName: \"kubernetes.io/projected/703e43c0-1694-431a-ba75-a83be15ee561-kube-api-access-gpm62\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.326014 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.326029 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.326041 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.326008 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-config-data" (OuterVolumeSpecName: "config-data") pod "703e43c0-1694-431a-ba75-a83be15ee561" (UID: "703e43c0-1694-431a-ba75-a83be15ee561"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.370269 4799 scope.go:117] "RemoveContainer" containerID="821d1a9ad8c6571d720de6689ca9e31ac2fb118c4722604d3604d22a0d87a7c0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.397436 4799 scope.go:117] "RemoveContainer" containerID="6a669c8913771d2834db14d9653e31001cb4f95bd5dc7b63963f2b041b67eb55" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.417208 4799 scope.go:117] "RemoveContainer" containerID="fa3e82f6a3cbd4791c27a8a2a5269fff2955afa1f115e6718e1f5b8bfc1f24ac" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.427903 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/703e43c0-1694-431a-ba75-a83be15ee561-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.572079 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.591904 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.606867 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:48 crc kubenswrapper[4799]: E0121 17:53:48.607519 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="703e43c0-1694-431a-ba75-a83be15ee561" containerName="ceilometer-central-agent" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.607555 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="703e43c0-1694-431a-ba75-a83be15ee561" containerName="ceilometer-central-agent" Jan 21 17:53:48 crc kubenswrapper[4799]: E0121 17:53:48.607602 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="703e43c0-1694-431a-ba75-a83be15ee561" containerName="sg-core" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.607619 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="703e43c0-1694-431a-ba75-a83be15ee561" containerName="sg-core" Jan 21 17:53:48 crc kubenswrapper[4799]: E0121 17:53:48.607641 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="703e43c0-1694-431a-ba75-a83be15ee561" containerName="ceilometer-notification-agent" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.607650 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="703e43c0-1694-431a-ba75-a83be15ee561" containerName="ceilometer-notification-agent" Jan 21 17:53:48 crc kubenswrapper[4799]: E0121 17:53:48.607681 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="703e43c0-1694-431a-ba75-a83be15ee561" containerName="proxy-httpd" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.607691 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="703e43c0-1694-431a-ba75-a83be15ee561" containerName="proxy-httpd" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.607949 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="703e43c0-1694-431a-ba75-a83be15ee561" containerName="sg-core" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.607976 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="703e43c0-1694-431a-ba75-a83be15ee561" containerName="ceilometer-notification-agent" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.607989 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="703e43c0-1694-431a-ba75-a83be15ee561" containerName="ceilometer-central-agent" Jan 21 17:53:48 crc 
kubenswrapper[4799]: I0121 17:53:48.608003 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="703e43c0-1694-431a-ba75-a83be15ee561" containerName="proxy-httpd" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.618355 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.626843 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.631924 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.634256 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.733932 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-config-data\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.734110 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-run-httpd\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.734395 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-scripts\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.734456 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcbtd\" (UniqueName: \"kubernetes.io/projected/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-kube-api-access-zcbtd\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.734653 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.734714 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.734824 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-log-httpd\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.838556 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.838654 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.838788 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-log-httpd\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.838889 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-config-data\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.838994 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-run-httpd\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.839231 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-scripts\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.839316 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcbtd\" (UniqueName: \"kubernetes.io/projected/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-kube-api-access-zcbtd\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.839328 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-log-httpd\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.840619 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-run-httpd\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.845542 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-scripts\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.845607 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-config-data\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.846041 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.854005 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.866433 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcbtd\" (UniqueName: \"kubernetes.io/projected/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-kube-api-access-zcbtd\") pod \"ceilometer-0\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " pod="openstack/ceilometer-0" Jan 21 17:53:48 crc kubenswrapper[4799]: I0121 17:53:48.944767 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:53:49 crc kubenswrapper[4799]: I0121 17:53:49.433284 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:49 crc kubenswrapper[4799]: W0121 17:53:49.442349 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83fb2ae7_cdbf_4f0c_9550_c937bc8f75c9.slice/crio-5597f499616e79910a649e3f20b64613ebc0eba2d875bf265b6166968a0372f1 WatchSource:0}: Error finding container 5597f499616e79910a649e3f20b64613ebc0eba2d875bf265b6166968a0372f1: Status 404 returned error can't find the container with id 5597f499616e79910a649e3f20b64613ebc0eba2d875bf265b6166968a0372f1 Jan 21 17:53:50 crc kubenswrapper[4799]: I0121 17:53:50.219797 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="703e43c0-1694-431a-ba75-a83be15ee561" path="/var/lib/kubelet/pods/703e43c0-1694-431a-ba75-a83be15ee561/volumes" Jan 21 17:53:50 crc kubenswrapper[4799]: I0121 17:53:50.270732 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9","Type":"ContainerStarted","Data":"486a7dec72ed50a4449b74c04742b0d27055215f15973a79cc5ad76b5fea5863"} Jan 21 17:53:50 crc kubenswrapper[4799]: I0121 17:53:50.271028 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9","Type":"ContainerStarted","Data":"8ee95166b28730dc2315e9fb814f9c3ce40b1eec3d6030a1f60cfe94b5a99fc2"} Jan 21 17:53:50 crc kubenswrapper[4799]: I0121 17:53:50.271039 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9","Type":"ContainerStarted","Data":"5597f499616e79910a649e3f20b64613ebc0eba2d875bf265b6166968a0372f1"} Jan 21 17:53:51 crc kubenswrapper[4799]: I0121 17:53:51.255586 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:53:51 crc kubenswrapper[4799]: I0121 17:53:51.303191 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9","Type":"ContainerStarted","Data":"e91f636028fa1288e6f4ed94df158446264ca2addab8a0b1933bace1e007f1aa"} Jan 21 17:53:53 crc kubenswrapper[4799]: I0121 17:53:53.345395 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9","Type":"ContainerStarted","Data":"f3d24e824c5b4fd41d456d083ba8db94d0ffd69ef76103d74e8e82085491cef6"} Jan 21 17:53:53 crc kubenswrapper[4799]: I0121 17:53:53.345557 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerName="ceilometer-central-agent" containerID="cri-o://8ee95166b28730dc2315e9fb814f9c3ce40b1eec3d6030a1f60cfe94b5a99fc2" gracePeriod=30 Jan 21 17:53:53 crc kubenswrapper[4799]: I0121 17:53:53.345615 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerName="ceilometer-notification-agent" containerID="cri-o://486a7dec72ed50a4449b74c04742b0d27055215f15973a79cc5ad76b5fea5863" gracePeriod=30 Jan 21 17:53:53 crc kubenswrapper[4799]: I0121 17:53:53.345600 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerName="proxy-httpd" containerID="cri-o://f3d24e824c5b4fd41d456d083ba8db94d0ffd69ef76103d74e8e82085491cef6" gracePeriod=30 Jan 21 17:53:53 crc kubenswrapper[4799]: I0121 17:53:53.345645 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerName="sg-core" containerID="cri-o://e91f636028fa1288e6f4ed94df158446264ca2addab8a0b1933bace1e007f1aa" gracePeriod=30 Jan 21 17:53:53 crc kubenswrapper[4799]: I0121 17:53:53.345967 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 21 17:53:53 crc kubenswrapper[4799]: I0121 17:53:53.370349 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.775500992 podStartE2EDuration="5.370327999s" podCreationTimestamp="2026-01-21 17:53:48 +0000 UTC" firstStartedPulling="2026-01-21 17:53:49.445007769 +0000 UTC m=+1256.071297792" lastFinishedPulling="2026-01-21 17:53:52.039834776 +0000 UTC m=+1258.666124799" observedRunningTime="2026-01-21 17:53:53.366740959 +0000 UTC m=+1259.993030992" watchObservedRunningTime="2026-01-21 17:53:53.370327999 +0000 UTC m=+1259.996618022" Jan 21 17:53:53 crc kubenswrapper[4799]: I0121 17:53:53.818674 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Jan 21 17:53:53 crc kubenswrapper[4799]: I0121 17:53:53.819727 4799 scope.go:117] "RemoveContainer" containerID="d54c9080e88caa75a6ed11cd37e4d9f1b794c81d3c3beb94fa50a91e66373329" Jan 21 17:53:53 crc kubenswrapper[4799]: E0121 17:53:53.820194 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(e3555046-24d9-4700-bdb8-0a09c35f651a)\"" pod="openstack/watcher-decision-engine-0" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" Jan 21 17:53:54 crc kubenswrapper[4799]: I0121 17:53:54.357191 4799 generic.go:334] "Generic (PLEG): 
container finished" podID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerID="f3d24e824c5b4fd41d456d083ba8db94d0ffd69ef76103d74e8e82085491cef6" exitCode=0 Jan 21 17:53:54 crc kubenswrapper[4799]: I0121 17:53:54.357231 4799 generic.go:334] "Generic (PLEG): container finished" podID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerID="e91f636028fa1288e6f4ed94df158446264ca2addab8a0b1933bace1e007f1aa" exitCode=2 Jan 21 17:53:54 crc kubenswrapper[4799]: I0121 17:53:54.357239 4799 generic.go:334] "Generic (PLEG): container finished" podID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerID="486a7dec72ed50a4449b74c04742b0d27055215f15973a79cc5ad76b5fea5863" exitCode=0 Jan 21 17:53:54 crc kubenswrapper[4799]: I0121 17:53:54.357260 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9","Type":"ContainerDied","Data":"f3d24e824c5b4fd41d456d083ba8db94d0ffd69ef76103d74e8e82085491cef6"} Jan 21 17:53:54 crc kubenswrapper[4799]: I0121 17:53:54.357287 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9","Type":"ContainerDied","Data":"e91f636028fa1288e6f4ed94df158446264ca2addab8a0b1933bace1e007f1aa"} Jan 21 17:53:54 crc kubenswrapper[4799]: I0121 17:53:54.357298 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9","Type":"ContainerDied","Data":"486a7dec72ed50a4449b74c04742b0d27055215f15973a79cc5ad76b5fea5863"} Jan 21 17:53:55 crc kubenswrapper[4799]: I0121 17:53:55.971721 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:53:55 crc kubenswrapper[4799]: I0121 17:53:55.971973 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:54:00 crc kubenswrapper[4799]: I0121 17:54:00.373724 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 21 17:54:00 crc kubenswrapper[4799]: I0121 17:54:00.374574 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="21e4d907-4ebb-4839-ab96-454bd4d08954" containerName="glance-log" containerID="cri-o://8926411eb04e63565c5b583071005e198c2fbad3bfe9f0d039f0483c2712051f" gracePeriod=30 Jan 21 17:54:00 crc kubenswrapper[4799]: I0121 17:54:00.374754 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="21e4d907-4ebb-4839-ab96-454bd4d08954" containerName="glance-httpd" containerID="cri-o://a6183f03da598d687e7dcc3db8033af4efa220d4e56c96b8925d84fc692ea091" gracePeriod=30 Jan 21 17:54:00 crc kubenswrapper[4799]: I0121 17:54:00.504386 4799 generic.go:334] "Generic (PLEG): container finished" podID="21e4d907-4ebb-4839-ab96-454bd4d08954" containerID="8926411eb04e63565c5b583071005e198c2fbad3bfe9f0d039f0483c2712051f" exitCode=143 Jan 21 17:54:00 crc kubenswrapper[4799]: I0121 17:54:00.504485 4799 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/glance-default-external-api-0" event={"ID":"21e4d907-4ebb-4839-ab96-454bd4d08954","Type":"ContainerDied","Data":"8926411eb04e63565c5b583071005e198c2fbad3bfe9f0d039f0483c2712051f"} Jan 21 17:54:01 crc kubenswrapper[4799]: I0121 17:54:01.521471 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-w4p6b" event={"ID":"310c250a-8e8a-402c-84d6-5fb50340d73d","Type":"ContainerStarted","Data":"10fbb7268ce91cc9fda07cec442f102c130869426157347b4a3061eb4b7a5461"} Jan 21 17:54:01 crc kubenswrapper[4799]: I0121 17:54:01.545250 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-w4p6b" podStartSLOduration=2.127837537 podStartE2EDuration="15.545228321s" podCreationTimestamp="2026-01-21 17:53:46 +0000 UTC" firstStartedPulling="2026-01-21 17:53:47.696436401 +0000 UTC m=+1254.322726424" lastFinishedPulling="2026-01-21 17:54:01.113827184 +0000 UTC m=+1267.740117208" observedRunningTime="2026-01-21 17:54:01.539888032 +0000 UTC m=+1268.166178055" watchObservedRunningTime="2026-01-21 17:54:01.545228321 +0000 UTC m=+1268.171518344" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.321945 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.322242 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5ff098a1-6561-4017-98c9-3014f678faad" containerName="glance-log" containerID="cri-o://919a34b4723f6e4a231589d646cca27f8272d69e83efd3c9631383fc2e9194e2" gracePeriod=30 Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.322337 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5ff098a1-6561-4017-98c9-3014f678faad" containerName="glance-httpd" containerID="cri-o://9f0fc1351f8528068db150962a8958eda5dbc0473e91ef43e13f35d02bb6ca78" gracePeriod=30 Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.546977 4799 generic.go:334] "Generic (PLEG): container finished" podID="5ff098a1-6561-4017-98c9-3014f678faad" containerID="919a34b4723f6e4a231589d646cca27f8272d69e83efd3c9631383fc2e9194e2" exitCode=143 Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.548089 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5ff098a1-6561-4017-98c9-3014f678faad","Type":"ContainerDied","Data":"919a34b4723f6e4a231589d646cca27f8272d69e83efd3c9631383fc2e9194e2"} Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.570295 4799 generic.go:334] "Generic (PLEG): container finished" podID="21e4d907-4ebb-4839-ab96-454bd4d08954" containerID="a6183f03da598d687e7dcc3db8033af4efa220d4e56c96b8925d84fc692ea091" exitCode=0 Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.571245 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"21e4d907-4ebb-4839-ab96-454bd4d08954","Type":"ContainerDied","Data":"a6183f03da598d687e7dcc3db8033af4efa220d4e56c96b8925d84fc692ea091"} Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.571281 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"21e4d907-4ebb-4839-ab96-454bd4d08954","Type":"ContainerDied","Data":"9e966b3199c5ea754d6200465d2a4f8631e91d1f0a0042d8d25ad78c7bda2df1"} Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 
17:54:02.571295 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e966b3199c5ea754d6200465d2a4f8631e91d1f0a0042d8d25ad78c7bda2df1" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.631905 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.659325 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21e4d907-4ebb-4839-ab96-454bd4d08954-httpd-run\") pod \"21e4d907-4ebb-4839-ab96-454bd4d08954\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.659400 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"21e4d907-4ebb-4839-ab96-454bd4d08954\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.659457 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-config-data\") pod \"21e4d907-4ebb-4839-ab96-454bd4d08954\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.659533 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-combined-ca-bundle\") pod \"21e4d907-4ebb-4839-ab96-454bd4d08954\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.659573 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21e4d907-4ebb-4839-ab96-454bd4d08954-logs\") pod \"21e4d907-4ebb-4839-ab96-454bd4d08954\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.659607 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-public-tls-certs\") pod \"21e4d907-4ebb-4839-ab96-454bd4d08954\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.659670 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bv9q4\" (UniqueName: \"kubernetes.io/projected/21e4d907-4ebb-4839-ab96-454bd4d08954-kube-api-access-bv9q4\") pod \"21e4d907-4ebb-4839-ab96-454bd4d08954\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.659717 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-scripts\") pod \"21e4d907-4ebb-4839-ab96-454bd4d08954\" (UID: \"21e4d907-4ebb-4839-ab96-454bd4d08954\") " Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.661409 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21e4d907-4ebb-4839-ab96-454bd4d08954-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "21e4d907-4ebb-4839-ab96-454bd4d08954" (UID: "21e4d907-4ebb-4839-ab96-454bd4d08954"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.667330 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21e4d907-4ebb-4839-ab96-454bd4d08954-logs" (OuterVolumeSpecName: "logs") pod "21e4d907-4ebb-4839-ab96-454bd4d08954" (UID: "21e4d907-4ebb-4839-ab96-454bd4d08954"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.667947 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "21e4d907-4ebb-4839-ab96-454bd4d08954" (UID: "21e4d907-4ebb-4839-ab96-454bd4d08954"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.668980 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21e4d907-4ebb-4839-ab96-454bd4d08954-kube-api-access-bv9q4" (OuterVolumeSpecName: "kube-api-access-bv9q4") pod "21e4d907-4ebb-4839-ab96-454bd4d08954" (UID: "21e4d907-4ebb-4839-ab96-454bd4d08954"). InnerVolumeSpecName "kube-api-access-bv9q4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.671427 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-scripts" (OuterVolumeSpecName: "scripts") pod "21e4d907-4ebb-4839-ab96-454bd4d08954" (UID: "21e4d907-4ebb-4839-ab96-454bd4d08954"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.741252 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21e4d907-4ebb-4839-ab96-454bd4d08954" (UID: "21e4d907-4ebb-4839-ab96-454bd4d08954"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.762952 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21e4d907-4ebb-4839-ab96-454bd4d08954-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.765319 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.765340 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.765358 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21e4d907-4ebb-4839-ab96-454bd4d08954-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.765371 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bv9q4\" (UniqueName: \"kubernetes.io/projected/21e4d907-4ebb-4839-ab96-454bd4d08954-kube-api-access-bv9q4\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.765385 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.783295 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-config-data" (OuterVolumeSpecName: "config-data") pod "21e4d907-4ebb-4839-ab96-454bd4d08954" (UID: "21e4d907-4ebb-4839-ab96-454bd4d08954"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.802914 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "21e4d907-4ebb-4839-ab96-454bd4d08954" (UID: "21e4d907-4ebb-4839-ab96-454bd4d08954"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.865715 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.867083 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.867108 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:02 crc kubenswrapper[4799]: I0121 17:54:02.867135 4799 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/21e4d907-4ebb-4839-ab96-454bd4d08954-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.579942 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.616664 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.625014 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.651147 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 21 17:54:03 crc kubenswrapper[4799]: E0121 17:54:03.651579 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21e4d907-4ebb-4839-ab96-454bd4d08954" containerName="glance-httpd" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.651597 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="21e4d907-4ebb-4839-ab96-454bd4d08954" containerName="glance-httpd" Jan 21 17:54:03 crc kubenswrapper[4799]: E0121 17:54:03.651625 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21e4d907-4ebb-4839-ab96-454bd4d08954" containerName="glance-log" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.651632 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="21e4d907-4ebb-4839-ab96-454bd4d08954" containerName="glance-log" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.651820 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="21e4d907-4ebb-4839-ab96-454bd4d08954" containerName="glance-log" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.651833 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="21e4d907-4ebb-4839-ab96-454bd4d08954" containerName="glance-httpd" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.652860 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.655450 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.655815 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.670622 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.687024 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.687094 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.687164 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-logs\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.687232 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbgqt\" (UniqueName: \"kubernetes.io/projected/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-kube-api-access-kbgqt\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.687265 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.687325 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-scripts\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.687374 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.687468 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-config-data\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.789019 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbgqt\" (UniqueName: \"kubernetes.io/projected/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-kube-api-access-kbgqt\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.789076 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.789236 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-scripts\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.789346 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.789358 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.789517 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-config-data\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.789552 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.789584 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.789626 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-logs\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.790109 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-logs\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.790106 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.796656 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-config-data\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.798811 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-scripts\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.798989 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.807577 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.814083 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbgqt\" (UniqueName: \"kubernetes.io/projected/486557f2-139f-4297-8c6c-9dc9ed6f5cdc-kube-api-access-kbgqt\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.840903 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"486557f2-139f-4297-8c6c-9dc9ed6f5cdc\") " pod="openstack/glance-default-external-api-0" Jan 21 17:54:03 crc kubenswrapper[4799]: I0121 17:54:03.971318 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.258199 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21e4d907-4ebb-4839-ab96-454bd4d08954" path="/var/lib/kubelet/pods/21e4d907-4ebb-4839-ab96-454bd4d08954/volumes" Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.616059 4799 generic.go:334] "Generic (PLEG): container finished" podID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerID="8ee95166b28730dc2315e9fb814f9c3ce40b1eec3d6030a1f60cfe94b5a99fc2" exitCode=0 Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.616157 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9","Type":"ContainerDied","Data":"8ee95166b28730dc2315e9fb814f9c3ce40b1eec3d6030a1f60cfe94b5a99fc2"} Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.627263 4799 generic.go:334] "Generic (PLEG): container finished" podID="5ff098a1-6561-4017-98c9-3014f678faad" containerID="9f0fc1351f8528068db150962a8958eda5dbc0473e91ef43e13f35d02bb6ca78" exitCode=0 Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.627311 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5ff098a1-6561-4017-98c9-3014f678faad","Type":"ContainerDied","Data":"9f0fc1351f8528068db150962a8958eda5dbc0473e91ef43e13f35d02bb6ca78"} Jan 21 17:54:04 crc kubenswrapper[4799]: E0121 17:54:04.650651 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83fb2ae7_cdbf_4f0c_9550_c937bc8f75c9.slice/crio-conmon-8ee95166b28730dc2315e9fb814f9c3ce40b1eec3d6030a1f60cfe94b5a99fc2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83fb2ae7_cdbf_4f0c_9550_c937bc8f75c9.slice/crio-8ee95166b28730dc2315e9fb814f9c3ce40b1eec3d6030a1f60cfe94b5a99fc2.scope\": RecentStats: unable to find data in memory cache]" Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.839874 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.872525 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.874919 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.929097 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-combined-ca-bundle\") pod \"5ff098a1-6561-4017-98c9-3014f678faad\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.929201 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-internal-tls-certs\") pod \"5ff098a1-6561-4017-98c9-3014f678faad\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.929250 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcbtd\" (UniqueName: \"kubernetes.io/projected/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-kube-api-access-zcbtd\") pod \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.929280 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-config-data\") pod \"5ff098a1-6561-4017-98c9-3014f678faad\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.929356 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-run-httpd\") pod \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.929382 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-combined-ca-bundle\") pod \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.929410 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjt5r\" (UniqueName: \"kubernetes.io/projected/5ff098a1-6561-4017-98c9-3014f678faad-kube-api-access-bjt5r\") pod \"5ff098a1-6561-4017-98c9-3014f678faad\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.929447 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ff098a1-6561-4017-98c9-3014f678faad-logs\") pod \"5ff098a1-6561-4017-98c9-3014f678faad\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.929507 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"5ff098a1-6561-4017-98c9-3014f678faad\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.929564 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-config-data\") pod \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\" (UID: 
\"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.929588 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-scripts\") pod \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.929613 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-log-httpd\") pod \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.929639 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5ff098a1-6561-4017-98c9-3014f678faad-httpd-run\") pod \"5ff098a1-6561-4017-98c9-3014f678faad\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.930471 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ff098a1-6561-4017-98c9-3014f678faad-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5ff098a1-6561-4017-98c9-3014f678faad" (UID: "5ff098a1-6561-4017-98c9-3014f678faad"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.933281 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ff098a1-6561-4017-98c9-3014f678faad-logs" (OuterVolumeSpecName: "logs") pod "5ff098a1-6561-4017-98c9-3014f678faad" (UID: "5ff098a1-6561-4017-98c9-3014f678faad"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.934101 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" (UID: "83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.939010 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-kube-api-access-zcbtd" (OuterVolumeSpecName: "kube-api-access-zcbtd") pod "83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" (UID: "83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9"). InnerVolumeSpecName "kube-api-access-zcbtd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.939693 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ff098a1-6561-4017-98c9-3014f678faad-kube-api-access-bjt5r" (OuterVolumeSpecName: "kube-api-access-bjt5r") pod "5ff098a1-6561-4017-98c9-3014f678faad" (UID: "5ff098a1-6561-4017-98c9-3014f678faad"). InnerVolumeSpecName "kube-api-access-bjt5r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.941258 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" (UID: "83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:54:04 crc kubenswrapper[4799]: I0121 17:54:04.945697 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "5ff098a1-6561-4017-98c9-3014f678faad" (UID: "5ff098a1-6561-4017-98c9-3014f678faad"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:04.997380 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-scripts" (OuterVolumeSpecName: "scripts") pod "83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" (UID: "83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.033962 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ff098a1-6561-4017-98c9-3014f678faad" (UID: "5ff098a1-6561-4017-98c9-3014f678faad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.033996 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-scripts\") pod \"5ff098a1-6561-4017-98c9-3014f678faad\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.034040 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-sg-core-conf-yaml\") pod \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\" (UID: \"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9\") " Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.034073 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-combined-ca-bundle\") pod \"5ff098a1-6561-4017-98c9-3014f678faad\" (UID: \"5ff098a1-6561-4017-98c9-3014f678faad\") " Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.034663 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcbtd\" (UniqueName: \"kubernetes.io/projected/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-kube-api-access-zcbtd\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.034682 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.034691 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjt5r\" (UniqueName: 
\"kubernetes.io/projected/5ff098a1-6561-4017-98c9-3014f678faad-kube-api-access-bjt5r\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.034701 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ff098a1-6561-4017-98c9-3014f678faad-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.034722 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.034730 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.034738 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.034746 4799 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5ff098a1-6561-4017-98c9-3014f678faad-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:05 crc kubenswrapper[4799]: W0121 17:54:05.036401 4799 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/5ff098a1-6561-4017-98c9-3014f678faad/volumes/kubernetes.io~secret/combined-ca-bundle Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.036428 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ff098a1-6561-4017-98c9-3014f678faad" (UID: "5ff098a1-6561-4017-98c9-3014f678faad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.038341 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-scripts" (OuterVolumeSpecName: "scripts") pod "5ff098a1-6561-4017-98c9-3014f678faad" (UID: "5ff098a1-6561-4017-98c9-3014f678faad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.079703 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5ff098a1-6561-4017-98c9-3014f678faad" (UID: "5ff098a1-6561-4017-98c9-3014f678faad"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.085403 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.095061 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" (UID: "83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9"). 
InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.132499 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-config-data" (OuterVolumeSpecName: "config-data") pod "5ff098a1-6561-4017-98c9-3014f678faad" (UID: "5ff098a1-6561-4017-98c9-3014f678faad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.137320 4799 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.137348 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.137358 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.137368 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.137379 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.137387 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ff098a1-6561-4017-98c9-3014f678faad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.149829 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" (UID: "83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.186241 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-config-data" (OuterVolumeSpecName: "config-data") pod "83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" (UID: "83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.239138 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.239186 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.661320 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9","Type":"ContainerDied","Data":"5597f499616e79910a649e3f20b64613ebc0eba2d875bf265b6166968a0372f1"} Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.661646 4799 scope.go:117] "RemoveContainer" containerID="f3d24e824c5b4fd41d456d083ba8db94d0ffd69ef76103d74e8e82085491cef6" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.661866 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.674745 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5ff098a1-6561-4017-98c9-3014f678faad","Type":"ContainerDied","Data":"6cc85f0db739df8a87ec3fa0d3b20326292782eb902daada63ea1250ad30b890"} Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.674862 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.694443 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"486557f2-139f-4297-8c6c-9dc9ed6f5cdc","Type":"ContainerStarted","Data":"ba4d99c29b0366280562ec8032a9632ac3b0749c2628d52e430ecdcc661e90fa"} Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.694496 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"486557f2-139f-4297-8c6c-9dc9ed6f5cdc","Type":"ContainerStarted","Data":"d6ddea74b459faafa885c40a25359baa2f6ece6b10cb4aff223091f112b4d404"} Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.774092 4799 scope.go:117] "RemoveContainer" containerID="e91f636028fa1288e6f4ed94df158446264ca2addab8a0b1933bace1e007f1aa" Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.791345 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.826587 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.843376 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:54:05 crc kubenswrapper[4799]: I0121 17:54:05.850690 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:05.861865 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 21 17:54:06 crc kubenswrapper[4799]: E0121 17:54:05.862416 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerName="proxy-httpd" Jan 21 17:54:06 
crc kubenswrapper[4799]: I0121 17:54:05.862433 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerName="proxy-httpd" Jan 21 17:54:06 crc kubenswrapper[4799]: E0121 17:54:05.862450 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerName="ceilometer-notification-agent" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:05.862458 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerName="ceilometer-notification-agent" Jan 21 17:54:06 crc kubenswrapper[4799]: E0121 17:54:05.862480 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ff098a1-6561-4017-98c9-3014f678faad" containerName="glance-httpd" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:05.862492 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ff098a1-6561-4017-98c9-3014f678faad" containerName="glance-httpd" Jan 21 17:54:06 crc kubenswrapper[4799]: E0121 17:54:05.862503 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerName="sg-core" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:05.862513 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerName="sg-core" Jan 21 17:54:06 crc kubenswrapper[4799]: E0121 17:54:05.862524 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ff098a1-6561-4017-98c9-3014f678faad" containerName="glance-log" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:05.862533 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ff098a1-6561-4017-98c9-3014f678faad" containerName="glance-log" Jan 21 17:54:06 crc kubenswrapper[4799]: E0121 17:54:05.862545 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerName="ceilometer-central-agent" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:05.862554 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerName="ceilometer-central-agent" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:05.862793 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerName="sg-core" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:05.862818 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ff098a1-6561-4017-98c9-3014f678faad" containerName="glance-log" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:05.862830 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerName="ceilometer-central-agent" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:05.862844 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerName="ceilometer-notification-agent" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:05.862859 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ff098a1-6561-4017-98c9-3014f678faad" containerName="glance-httpd" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:05.862879 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" containerName="proxy-httpd" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:05.864263 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:05.870553 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.005817 4799 scope.go:117] "RemoveContainer" containerID="486a7dec72ed50a4449b74c04742b0d27055215f15973a79cc5ad76b5fea5863" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.006252 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.006423 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.021580 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.025984 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.031086 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.034791 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.040760 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.066568 4799 scope.go:117] "RemoveContainer" containerID="8ee95166b28730dc2315e9fb814f9c3ce40b1eec3d6030a1f60cfe94b5a99fc2" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.108582 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.108680 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.108727 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.108756 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-logs\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.108862 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.108898 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.108921 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk8r9\" (UniqueName: \"kubernetes.io/projected/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-kube-api-access-mk8r9\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.108992 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.123077 4799 scope.go:117] "RemoveContainer" containerID="9f0fc1351f8528068db150962a8958eda5dbc0473e91ef43e13f35d02bb6ca78" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.184330 4799 scope.go:117] "RemoveContainer" containerID="919a34b4723f6e4a231589d646cca27f8272d69e83efd3c9631383fc2e9194e2" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.205262 4799 scope.go:117] "RemoveContainer" containerID="d54c9080e88caa75a6ed11cd37e4d9f1b794c81d3c3beb94fa50a91e66373329" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.210487 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.210547 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.210680 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.210711 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-log-httpd\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.210740 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.210765 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.210785 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-logs\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.210853 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-scripts\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.210952 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-run-httpd\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.210987 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.211022 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.211051 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.211081 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk8r9\" (UniqueName: \"kubernetes.io/projected/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-kube-api-access-mk8r9\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.211114 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rmr2\" (UniqueName: 
\"kubernetes.io/projected/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-kube-api-access-2rmr2\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.211247 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-config-data\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.211496 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-logs\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.211603 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.211815 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.222898 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.223171 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.226790 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.228865 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.245661 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk8r9\" (UniqueName: \"kubernetes.io/projected/5152eb20-55f2-4c0c-9a8b-6b1e9043abf9-kube-api-access-mk8r9\") pod \"glance-default-internal-api-0\" (UID: 
\"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.253563 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9\") " pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.257440 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ff098a1-6561-4017-98c9-3014f678faad" path="/var/lib/kubelet/pods/5ff098a1-6561-4017-98c9-3014f678faad/volumes" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.258606 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9" path="/var/lib/kubelet/pods/83fb2ae7-cdbf-4f0c-9550-c937bc8f75c9/volumes" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.322542 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-config-data\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.322627 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.322661 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-log-httpd\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.322717 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-scripts\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.322785 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-run-httpd\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.322811 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.322845 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rmr2\" (UniqueName: \"kubernetes.io/projected/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-kube-api-access-2rmr2\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.325152 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-log-httpd\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.325560 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-run-httpd\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.327031 4799 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podf460180e-2550-4286-ae68-85d752d3a3a3"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podf460180e-2550-4286-ae68-85d752d3a3a3] : Timed out while waiting for systemd to remove kubepods-besteffort-podf460180e_2550_4286_ae68_85d752d3a3a3.slice" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.327183 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-scripts\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.330248 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-config-data\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.338195 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.339801 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.344811 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rmr2\" (UniqueName: \"kubernetes.io/projected/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-kube-api-access-2rmr2\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.350047 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.352742 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.725718 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"486557f2-139f-4297-8c6c-9dc9ed6f5cdc","Type":"ContainerStarted","Data":"b611aeb3afe416b907d9a326c4e1f3e48bb34b47ca21e8c013bd3f7fb0cf1916"} Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.743186 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"e3555046-24d9-4700-bdb8-0a09c35f651a","Type":"ContainerStarted","Data":"fc0ae9821a0caba3927843d07695cfdec56a49786da5338b339245245d323035"} Jan 21 17:54:06 crc kubenswrapper[4799]: I0121 17:54:06.765748 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.765725028 podStartE2EDuration="3.765725028s" podCreationTimestamp="2026-01-21 17:54:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:54:06.746882231 +0000 UTC m=+1273.373172274" watchObservedRunningTime="2026-01-21 17:54:06.765725028 +0000 UTC m=+1273.392015051" Jan 21 17:54:07 crc kubenswrapper[4799]: I0121 17:54:06.999711 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:54:07 crc kubenswrapper[4799]: I0121 17:54:07.180803 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 21 17:54:07 crc kubenswrapper[4799]: I0121 17:54:07.761998 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d71d15bb-8612-40d5-b21a-5fe51f6c95d5","Type":"ContainerStarted","Data":"995a0eae055fbdc6641b75a43b7ed72ce31134fc842d6f302138e2cb6e355b25"} Jan 21 17:54:07 crc kubenswrapper[4799]: I0121 17:54:07.762605 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d71d15bb-8612-40d5-b21a-5fe51f6c95d5","Type":"ContainerStarted","Data":"34ae1d6a4490e166982a7e0939a2fadfac41886a6e7117297adde17d82ce8b7c"} Jan 21 17:54:07 crc kubenswrapper[4799]: I0121 17:54:07.765554 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9","Type":"ContainerStarted","Data":"09fe7886f16d6b4535f6fc1dd6d8668623fb4a530aac2044823ec0a88c69439b"} Jan 21 17:54:08 crc kubenswrapper[4799]: I0121 17:54:08.777208 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9","Type":"ContainerStarted","Data":"904f3960e1bd2cb27a53fc28340a862787127a087708d51f63025b6f3a484945"} Jan 21 17:54:08 crc kubenswrapper[4799]: I0121 17:54:08.778547 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5152eb20-55f2-4c0c-9a8b-6b1e9043abf9","Type":"ContainerStarted","Data":"7b9b35da8101c77bc82534192948721cb5f738c3e349542a620a1f78d1f2fd92"} Jan 21 17:54:08 crc kubenswrapper[4799]: I0121 17:54:08.780322 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d71d15bb-8612-40d5-b21a-5fe51f6c95d5","Type":"ContainerStarted","Data":"b2eb0c516a58245631231bc722aeeeda844bf91add33e0cdb51e488991bd4597"} Jan 21 17:54:08 crc kubenswrapper[4799]: I0121 17:54:08.780422 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"d71d15bb-8612-40d5-b21a-5fe51f6c95d5","Type":"ContainerStarted","Data":"6453eb4470b30ca2611cd25fdb7a87d5dc976e5d68a2e62bc39ef8a4d8b2b4ab"} Jan 21 17:54:08 crc kubenswrapper[4799]: I0121 17:54:08.802611 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.802591449 podStartE2EDuration="3.802591449s" podCreationTimestamp="2026-01-21 17:54:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:54:08.798733832 +0000 UTC m=+1275.425023865" watchObservedRunningTime="2026-01-21 17:54:08.802591449 +0000 UTC m=+1275.428881472" Jan 21 17:54:10 crc kubenswrapper[4799]: I0121 17:54:10.808430 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d71d15bb-8612-40d5-b21a-5fe51f6c95d5","Type":"ContainerStarted","Data":"9830c615ed08576c95a23a692cd33245b4618cf25aee13d57428205ece62c9da"} Jan 21 17:54:10 crc kubenswrapper[4799]: I0121 17:54:10.809403 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 21 17:54:10 crc kubenswrapper[4799]: I0121 17:54:10.842784 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.302156622 podStartE2EDuration="5.842763373s" podCreationTimestamp="2026-01-21 17:54:05 +0000 UTC" firstStartedPulling="2026-01-21 17:54:06.978860609 +0000 UTC m=+1273.605150642" lastFinishedPulling="2026-01-21 17:54:09.51946737 +0000 UTC m=+1276.145757393" observedRunningTime="2026-01-21 17:54:10.84124559 +0000 UTC m=+1277.467535613" watchObservedRunningTime="2026-01-21 17:54:10.842763373 +0000 UTC m=+1277.469053396" Jan 21 17:54:13 crc kubenswrapper[4799]: I0121 17:54:13.819946 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Jan 21 17:54:13 crc kubenswrapper[4799]: I0121 17:54:13.820227 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Jan 21 17:54:13 crc kubenswrapper[4799]: I0121 17:54:13.852840 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Jan 21 17:54:13 crc kubenswrapper[4799]: I0121 17:54:13.898348 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Jan 21 17:54:13 crc kubenswrapper[4799]: I0121 17:54:13.949348 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-decision-engine-0"] Jan 21 17:54:13 crc kubenswrapper[4799]: I0121 17:54:13.971731 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 21 17:54:13 crc kubenswrapper[4799]: I0121 17:54:13.971838 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 21 17:54:14 crc kubenswrapper[4799]: I0121 17:54:14.014509 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 21 17:54:14 crc kubenswrapper[4799]: I0121 17:54:14.015804 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 21 17:54:14 crc kubenswrapper[4799]: I0121 17:54:14.852685 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/glance-default-external-api-0" Jan 21 17:54:14 crc kubenswrapper[4799]: I0121 17:54:14.852752 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 21 17:54:15 crc kubenswrapper[4799]: I0121 17:54:15.861870 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-decision-engine-0" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerName="watcher-decision-engine" containerID="cri-o://fc0ae9821a0caba3927843d07695cfdec56a49786da5338b339245245d323035" gracePeriod=30 Jan 21 17:54:16 crc kubenswrapper[4799]: I0121 17:54:16.341335 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 21 17:54:16 crc kubenswrapper[4799]: I0121 17:54:16.341387 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 21 17:54:16 crc kubenswrapper[4799]: I0121 17:54:16.399309 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 21 17:54:16 crc kubenswrapper[4799]: I0121 17:54:16.400419 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 21 17:54:16 crc kubenswrapper[4799]: I0121 17:54:16.867843 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 21 17:54:16 crc kubenswrapper[4799]: I0121 17:54:16.871803 4799 generic.go:334] "Generic (PLEG): container finished" podID="310c250a-8e8a-402c-84d6-5fb50340d73d" containerID="10fbb7268ce91cc9fda07cec442f102c130869426157347b4a3061eb4b7a5461" exitCode=0 Jan 21 17:54:16 crc kubenswrapper[4799]: I0121 17:54:16.871882 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-w4p6b" event={"ID":"310c250a-8e8a-402c-84d6-5fb50340d73d","Type":"ContainerDied","Data":"10fbb7268ce91cc9fda07cec442f102c130869426157347b4a3061eb4b7a5461"} Jan 21 17:54:16 crc kubenswrapper[4799]: I0121 17:54:16.871921 4799 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 21 17:54:16 crc kubenswrapper[4799]: I0121 17:54:16.872458 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 21 17:54:16 crc kubenswrapper[4799]: I0121 17:54:16.872592 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 21 17:54:16 crc kubenswrapper[4799]: I0121 17:54:16.881030 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.248212 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-w4p6b" Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.313016 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-scripts\") pod \"310c250a-8e8a-402c-84d6-5fb50340d73d\" (UID: \"310c250a-8e8a-402c-84d6-5fb50340d73d\") " Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.313191 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-combined-ca-bundle\") pod \"310c250a-8e8a-402c-84d6-5fb50340d73d\" (UID: \"310c250a-8e8a-402c-84d6-5fb50340d73d\") " Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.313842 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lf7fj\" (UniqueName: \"kubernetes.io/projected/310c250a-8e8a-402c-84d6-5fb50340d73d-kube-api-access-lf7fj\") pod \"310c250a-8e8a-402c-84d6-5fb50340d73d\" (UID: \"310c250a-8e8a-402c-84d6-5fb50340d73d\") " Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.313962 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-config-data\") pod \"310c250a-8e8a-402c-84d6-5fb50340d73d\" (UID: \"310c250a-8e8a-402c-84d6-5fb50340d73d\") " Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.327464 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/310c250a-8e8a-402c-84d6-5fb50340d73d-kube-api-access-lf7fj" (OuterVolumeSpecName: "kube-api-access-lf7fj") pod "310c250a-8e8a-402c-84d6-5fb50340d73d" (UID: "310c250a-8e8a-402c-84d6-5fb50340d73d"). InnerVolumeSpecName "kube-api-access-lf7fj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.351363 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-scripts" (OuterVolumeSpecName: "scripts") pod "310c250a-8e8a-402c-84d6-5fb50340d73d" (UID: "310c250a-8e8a-402c-84d6-5fb50340d73d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.391305 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-config-data" (OuterVolumeSpecName: "config-data") pod "310c250a-8e8a-402c-84d6-5fb50340d73d" (UID: "310c250a-8e8a-402c-84d6-5fb50340d73d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.396567 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "310c250a-8e8a-402c-84d6-5fb50340d73d" (UID: "310c250a-8e8a-402c-84d6-5fb50340d73d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.417371 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lf7fj\" (UniqueName: \"kubernetes.io/projected/310c250a-8e8a-402c-84d6-5fb50340d73d-kube-api-access-lf7fj\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.417422 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.417439 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.417451 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/310c250a-8e8a-402c-84d6-5fb50340d73d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.846499 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.849388 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.896308 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-w4p6b" Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.896876 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-w4p6b" event={"ID":"310c250a-8e8a-402c-84d6-5fb50340d73d","Type":"ContainerDied","Data":"69e31d820f031e7077c0cfd947bed5936546fb14a303963d7a88a675da781c50"} Jan 21 17:54:18 crc kubenswrapper[4799]: I0121 17:54:18.896923 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69e31d820f031e7077c0cfd947bed5936546fb14a303963d7a88a675da781c50" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.082570 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 21 17:54:19 crc kubenswrapper[4799]: E0121 17:54:19.083082 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="310c250a-8e8a-402c-84d6-5fb50340d73d" containerName="nova-cell0-conductor-db-sync" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.083103 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="310c250a-8e8a-402c-84d6-5fb50340d73d" containerName="nova-cell0-conductor-db-sync" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.083365 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="310c250a-8e8a-402c-84d6-5fb50340d73d" containerName="nova-cell0-conductor-db-sync" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.084787 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.090414 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-xzc95" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.090523 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.101802 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.143392 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d21651e9-1ffb-472f-8c41-652621413b50-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"d21651e9-1ffb-472f-8c41-652621413b50\") " pod="openstack/nova-cell0-conductor-0" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.143465 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zj5mk\" (UniqueName: \"kubernetes.io/projected/d21651e9-1ffb-472f-8c41-652621413b50-kube-api-access-zj5mk\") pod \"nova-cell0-conductor-0\" (UID: \"d21651e9-1ffb-472f-8c41-652621413b50\") " pod="openstack/nova-cell0-conductor-0" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.143516 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d21651e9-1ffb-472f-8c41-652621413b50-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"d21651e9-1ffb-472f-8c41-652621413b50\") " pod="openstack/nova-cell0-conductor-0" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.245054 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d21651e9-1ffb-472f-8c41-652621413b50-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"d21651e9-1ffb-472f-8c41-652621413b50\") " pod="openstack/nova-cell0-conductor-0" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.245223 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d21651e9-1ffb-472f-8c41-652621413b50-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"d21651e9-1ffb-472f-8c41-652621413b50\") " pod="openstack/nova-cell0-conductor-0" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.245276 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zj5mk\" (UniqueName: \"kubernetes.io/projected/d21651e9-1ffb-472f-8c41-652621413b50-kube-api-access-zj5mk\") pod \"nova-cell0-conductor-0\" (UID: \"d21651e9-1ffb-472f-8c41-652621413b50\") " pod="openstack/nova-cell0-conductor-0" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.257192 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d21651e9-1ffb-472f-8c41-652621413b50-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"d21651e9-1ffb-472f-8c41-652621413b50\") " pod="openstack/nova-cell0-conductor-0" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.258788 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d21651e9-1ffb-472f-8c41-652621413b50-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"d21651e9-1ffb-472f-8c41-652621413b50\") " pod="openstack/nova-cell0-conductor-0" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.272174 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zj5mk\" (UniqueName: \"kubernetes.io/projected/d21651e9-1ffb-472f-8c41-652621413b50-kube-api-access-zj5mk\") pod \"nova-cell0-conductor-0\" (UID: \"d21651e9-1ffb-472f-8c41-652621413b50\") " pod="openstack/nova-cell0-conductor-0" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.425236 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.910109 4799 generic.go:334] "Generic (PLEG): container finished" podID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerID="fc0ae9821a0caba3927843d07695cfdec56a49786da5338b339245245d323035" exitCode=0 Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.910188 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"e3555046-24d9-4700-bdb8-0a09c35f651a","Type":"ContainerDied","Data":"fc0ae9821a0caba3927843d07695cfdec56a49786da5338b339245245d323035"} Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.910503 4799 scope.go:117] "RemoveContainer" containerID="d54c9080e88caa75a6ed11cd37e4d9f1b794c81d3c3beb94fa50a91e66373329" Jan 21 17:54:19 crc kubenswrapper[4799]: I0121 17:54:19.940518 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.371724 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.471417 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vgszb\" (UniqueName: \"kubernetes.io/projected/e3555046-24d9-4700-bdb8-0a09c35f651a-kube-api-access-vgszb\") pod \"e3555046-24d9-4700-bdb8-0a09c35f651a\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.471537 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3555046-24d9-4700-bdb8-0a09c35f651a-logs\") pod \"e3555046-24d9-4700-bdb8-0a09c35f651a\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.471581 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-config-data\") pod \"e3555046-24d9-4700-bdb8-0a09c35f651a\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.471605 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-custom-prometheus-ca\") pod \"e3555046-24d9-4700-bdb8-0a09c35f651a\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.471657 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-combined-ca-bundle\") pod \"e3555046-24d9-4700-bdb8-0a09c35f651a\" (UID: \"e3555046-24d9-4700-bdb8-0a09c35f651a\") " Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 
17:54:20.473536 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3555046-24d9-4700-bdb8-0a09c35f651a-logs" (OuterVolumeSpecName: "logs") pod "e3555046-24d9-4700-bdb8-0a09c35f651a" (UID: "e3555046-24d9-4700-bdb8-0a09c35f651a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.479785 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3555046-24d9-4700-bdb8-0a09c35f651a-kube-api-access-vgszb" (OuterVolumeSpecName: "kube-api-access-vgszb") pod "e3555046-24d9-4700-bdb8-0a09c35f651a" (UID: "e3555046-24d9-4700-bdb8-0a09c35f651a"). InnerVolumeSpecName "kube-api-access-vgszb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.513209 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "e3555046-24d9-4700-bdb8-0a09c35f651a" (UID: "e3555046-24d9-4700-bdb8-0a09c35f651a"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.513438 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e3555046-24d9-4700-bdb8-0a09c35f651a" (UID: "e3555046-24d9-4700-bdb8-0a09c35f651a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.554533 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-config-data" (OuterVolumeSpecName: "config-data") pod "e3555046-24d9-4700-bdb8-0a09c35f651a" (UID: "e3555046-24d9-4700-bdb8-0a09c35f651a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.574418 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgszb\" (UniqueName: \"kubernetes.io/projected/e3555046-24d9-4700-bdb8-0a09c35f651a-kube-api-access-vgszb\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.574725 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3555046-24d9-4700-bdb8-0a09c35f651a-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.574801 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.574876 4799 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.574943 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3555046-24d9-4700-bdb8-0a09c35f651a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.922979 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0" Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.922969 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"e3555046-24d9-4700-bdb8-0a09c35f651a","Type":"ContainerDied","Data":"c35795e1bae0c6341fcf99661e30668e0247e1b517364827c7fcb0ae934ea1b5"} Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.923407 4799 scope.go:117] "RemoveContainer" containerID="fc0ae9821a0caba3927843d07695cfdec56a49786da5338b339245245d323035" Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.925094 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"d21651e9-1ffb-472f-8c41-652621413b50","Type":"ContainerStarted","Data":"258f66050a089633cac61386cebc89ff5537a2d2dc530f9689359e1b0cd30907"} Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.925121 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"d21651e9-1ffb-472f-8c41-652621413b50","Type":"ContainerStarted","Data":"f6bea85b2170cf00c217bedefe06a8b29e880a3ba9e531f3cadf2f1761e52c60"} Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.925445 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.948710 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.948689775 podStartE2EDuration="1.948689775s" podCreationTimestamp="2026-01-21 17:54:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:54:20.947696408 +0000 UTC m=+1287.573986441" watchObservedRunningTime="2026-01-21 17:54:20.948689775 +0000 UTC m=+1287.574979798" Jan 21 17:54:20 crc kubenswrapper[4799]: I0121 17:54:20.984300 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/watcher-decision-engine-0"] Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.000903 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-decision-engine-0"] Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.019435 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"] Jan 21 17:54:21 crc kubenswrapper[4799]: E0121 17:54:21.019923 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerName="watcher-decision-engine" Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.019938 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerName="watcher-decision-engine" Jan 21 17:54:21 crc kubenswrapper[4799]: E0121 17:54:21.019952 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerName="watcher-decision-engine" Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.019957 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerName="watcher-decision-engine" Jan 21 17:54:21 crc kubenswrapper[4799]: E0121 17:54:21.019969 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerName="watcher-decision-engine" Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.019975 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerName="watcher-decision-engine" Jan 21 17:54:21 crc kubenswrapper[4799]: E0121 17:54:21.019999 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerName="watcher-decision-engine" Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.020004 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerName="watcher-decision-engine" Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.020217 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerName="watcher-decision-engine" Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.020234 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerName="watcher-decision-engine" Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.020913 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.025736 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"]
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.026061 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.085136 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30f6abc1-fcdc-4901-9e88-3b6c5fd2a223-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.085206 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/30f6abc1-fcdc-4901-9e88-3b6c5fd2a223-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.085256 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sl8xb\" (UniqueName: \"kubernetes.io/projected/30f6abc1-fcdc-4901-9e88-3b6c5fd2a223-kube-api-access-sl8xb\") pod \"watcher-decision-engine-0\" (UID: \"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.085290 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30f6abc1-fcdc-4901-9e88-3b6c5fd2a223-config-data\") pod \"watcher-decision-engine-0\" (UID: \"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.085309 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30f6abc1-fcdc-4901-9e88-3b6c5fd2a223-logs\") pod \"watcher-decision-engine-0\" (UID: \"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.188730 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30f6abc1-fcdc-4901-9e88-3b6c5fd2a223-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.188834 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/30f6abc1-fcdc-4901-9e88-3b6c5fd2a223-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.188911 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sl8xb\" (UniqueName: \"kubernetes.io/projected/30f6abc1-fcdc-4901-9e88-3b6c5fd2a223-kube-api-access-sl8xb\") pod \"watcher-decision-engine-0\" (UID: \"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.188968 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30f6abc1-fcdc-4901-9e88-3b6c5fd2a223-config-data\") pod \"watcher-decision-engine-0\" (UID: \"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.188995 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30f6abc1-fcdc-4901-9e88-3b6c5fd2a223-logs\") pod \"watcher-decision-engine-0\" (UID: \"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.189521 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30f6abc1-fcdc-4901-9e88-3b6c5fd2a223-logs\") pod \"watcher-decision-engine-0\" (UID: \"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.194315 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/30f6abc1-fcdc-4901-9e88-3b6c5fd2a223-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.197791 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30f6abc1-fcdc-4901-9e88-3b6c5fd2a223-config-data\") pod \"watcher-decision-engine-0\" (UID: \"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.198401 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30f6abc1-fcdc-4901-9e88-3b6c5fd2a223-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.215414 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sl8xb\" (UniqueName: \"kubernetes.io/projected/30f6abc1-fcdc-4901-9e88-3b6c5fd2a223-kube-api-access-sl8xb\") pod \"watcher-decision-engine-0\" (UID: \"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223\") " pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.337972 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.776036 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"]
Jan 21 17:54:21 crc kubenswrapper[4799]: I0121 17:54:21.951019 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223","Type":"ContainerStarted","Data":"969c1e87049a0405815223c16616a5697835ec3f398bb11e488ab8eec827016e"}
Jan 21 17:54:22 crc kubenswrapper[4799]: I0121 17:54:22.218744 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" path="/var/lib/kubelet/pods/e3555046-24d9-4700-bdb8-0a09c35f651a/volumes"
Jan 21 17:54:22 crc kubenswrapper[4799]: I0121 17:54:22.962073 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"30f6abc1-fcdc-4901-9e88-3b6c5fd2a223","Type":"ContainerStarted","Data":"8129cf0090d31d608af7c8121deea1b5648c43f7b13df84d21891a25f0c27f0c"}
Jan 21 17:54:22 crc kubenswrapper[4799]: I0121 17:54:22.995720 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=2.995696751 podStartE2EDuration="2.995696751s" podCreationTimestamp="2026-01-21 17:54:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:54:22.984831407 +0000 UTC m=+1289.611121440" watchObservedRunningTime="2026-01-21 17:54:22.995696751 +0000 UTC m=+1289.621986774"
Jan 21 17:54:25 crc kubenswrapper[4799]: I0121 17:54:25.971243 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 17:54:25 crc kubenswrapper[4799]: I0121 17:54:25.971580 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 17:54:25 crc kubenswrapper[4799]: I0121 17:54:25.971649 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s"
Jan 21 17:54:25 crc kubenswrapper[4799]: I0121 17:54:25.972843 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5fdd831026afa966e0f760fecb7476b95aadfcd525b00468c8c89ce1d2df0632"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 21 17:54:25 crc kubenswrapper[4799]: I0121 17:54:25.972922 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://5fdd831026afa966e0f760fecb7476b95aadfcd525b00468c8c89ce1d2df0632" gracePeriod=600
Jan 21 17:54:27 crc kubenswrapper[4799]: I0121 17:54:27.019626 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="5fdd831026afa966e0f760fecb7476b95aadfcd525b00468c8c89ce1d2df0632" exitCode=0
Jan 21 17:54:27 crc kubenswrapper[4799]: I0121 17:54:27.019729 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"5fdd831026afa966e0f760fecb7476b95aadfcd525b00468c8c89ce1d2df0632"}
Jan 21 17:54:27 crc kubenswrapper[4799]: I0121 17:54:27.020462 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"1b8c7c0681d7bc6849b2f8af2f982319bd30fd0c75b4bf458a9fe26a72616dd0"}
Jan 21 17:54:27 crc kubenswrapper[4799]: I0121 17:54:27.020501 4799 scope.go:117] "RemoveContainer" containerID="ae5330e16575441a8b84498a2fefd6345766a3ffb339a011bad17c508c054c31"
Jan 21 17:54:29 crc kubenswrapper[4799]: I0121 17:54:29.451963 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Jan 21 17:54:29 crc kubenswrapper[4799]: I0121 17:54:29.903174 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-p5h5t"]
Jan 21 17:54:29 crc kubenswrapper[4799]: I0121 17:54:29.903983 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerName="watcher-decision-engine"
Jan 21 17:54:29 crc kubenswrapper[4799]: I0121 17:54:29.904018 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3555046-24d9-4700-bdb8-0a09c35f651a" containerName="watcher-decision-engine"
Jan 21 17:54:29 crc kubenswrapper[4799]: I0121 17:54:29.904888 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-p5h5t"
Jan 21 17:54:29 crc kubenswrapper[4799]: I0121 17:54:29.909513 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Jan 21 17:54:29 crc kubenswrapper[4799]: I0121 17:54:29.909730 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Jan 21 17:54:29 crc kubenswrapper[4799]: I0121 17:54:29.917119 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-p5h5t"]
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.139147 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-scripts\") pod \"nova-cell0-cell-mapping-p5h5t\" (UID: \"fbc0e1b8-d099-4a3b-b501-b8486d893927\") " pod="openstack/nova-cell0-cell-mapping-p5h5t"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.139205 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-p5h5t\" (UID: \"fbc0e1b8-d099-4a3b-b501-b8486d893927\") " pod="openstack/nova-cell0-cell-mapping-p5h5t"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.139287 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-config-data\") pod \"nova-cell0-cell-mapping-p5h5t\" (UID: \"fbc0e1b8-d099-4a3b-b501-b8486d893927\") " pod="openstack/nova-cell0-cell-mapping-p5h5t"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.139308 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfs7c\" (UniqueName: \"kubernetes.io/projected/fbc0e1b8-d099-4a3b-b501-b8486d893927-kube-api-access-tfs7c\") pod \"nova-cell0-cell-mapping-p5h5t\" (UID: \"fbc0e1b8-d099-4a3b-b501-b8486d893927\") " pod="openstack/nova-cell0-cell-mapping-p5h5t"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.241655 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-config-data\") pod \"nova-cell0-cell-mapping-p5h5t\" (UID: \"fbc0e1b8-d099-4a3b-b501-b8486d893927\") " pod="openstack/nova-cell0-cell-mapping-p5h5t"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.241707 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfs7c\" (UniqueName: \"kubernetes.io/projected/fbc0e1b8-d099-4a3b-b501-b8486d893927-kube-api-access-tfs7c\") pod \"nova-cell0-cell-mapping-p5h5t\" (UID: \"fbc0e1b8-d099-4a3b-b501-b8486d893927\") " pod="openstack/nova-cell0-cell-mapping-p5h5t"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.241846 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-scripts\") pod \"nova-cell0-cell-mapping-p5h5t\" (UID: \"fbc0e1b8-d099-4a3b-b501-b8486d893927\") " pod="openstack/nova-cell0-cell-mapping-p5h5t"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.241864 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-p5h5t\" (UID: \"fbc0e1b8-d099-4a3b-b501-b8486d893927\") " pod="openstack/nova-cell0-cell-mapping-p5h5t"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.253582 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-p5h5t\" (UID: \"fbc0e1b8-d099-4a3b-b501-b8486d893927\") " pod="openstack/nova-cell0-cell-mapping-p5h5t"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.259682 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-scripts\") pod \"nova-cell0-cell-mapping-p5h5t\" (UID: \"fbc0e1b8-d099-4a3b-b501-b8486d893927\") " pod="openstack/nova-cell0-cell-mapping-p5h5t"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.261930 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-config-data\") pod \"nova-cell0-cell-mapping-p5h5t\" (UID: \"fbc0e1b8-d099-4a3b-b501-b8486d893927\") " pod="openstack/nova-cell0-cell-mapping-p5h5t"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.272017 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfs7c\" (UniqueName: \"kubernetes.io/projected/fbc0e1b8-d099-4a3b-b501-b8486d893927-kube-api-access-tfs7c\") pod \"nova-cell0-cell-mapping-p5h5t\" (UID: \"fbc0e1b8-d099-4a3b-b501-b8486d893927\") " pod="openstack/nova-cell0-cell-mapping-p5h5t"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.280723 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.283177 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.286321 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.298881 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.344306 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-config-data\") pod \"nova-scheduler-0\" (UID: \"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42\") " pod="openstack/nova-scheduler-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.344821 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfbnc\" (UniqueName: \"kubernetes.io/projected/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-kube-api-access-hfbnc\") pod \"nova-scheduler-0\" (UID: \"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42\") " pod="openstack/nova-scheduler-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.344885 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42\") " pod="openstack/nova-scheduler-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.414951 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.416980 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.430341 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.438494 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-p5h5t"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.447192 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b86db77-5206-4395-bdd7-138d53ed65f3-config-data\") pod \"nova-metadata-0\" (UID: \"7b86db77-5206-4395-bdd7-138d53ed65f3\") " pod="openstack/nova-metadata-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.447263 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b86db77-5206-4395-bdd7-138d53ed65f3-logs\") pod \"nova-metadata-0\" (UID: \"7b86db77-5206-4395-bdd7-138d53ed65f3\") " pod="openstack/nova-metadata-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.447299 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-config-data\") pod \"nova-scheduler-0\" (UID: \"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42\") " pod="openstack/nova-scheduler-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.447340 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfbnc\" (UniqueName: \"kubernetes.io/projected/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-kube-api-access-hfbnc\") pod \"nova-scheduler-0\" (UID: \"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42\") " pod="openstack/nova-scheduler-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.447372 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k92lk\" (UniqueName: \"kubernetes.io/projected/7b86db77-5206-4395-bdd7-138d53ed65f3-kube-api-access-k92lk\") pod \"nova-metadata-0\" (UID: \"7b86db77-5206-4395-bdd7-138d53ed65f3\") " pod="openstack/nova-metadata-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.447408 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42\") " pod="openstack/nova-scheduler-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.447464 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b86db77-5206-4395-bdd7-138d53ed65f3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7b86db77-5206-4395-bdd7-138d53ed65f3\") " pod="openstack/nova-metadata-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.457201 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-config-data\") pod \"nova-scheduler-0\" (UID: \"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42\") " pod="openstack/nova-scheduler-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.466964 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.482823 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42\") " pod="openstack/nova-scheduler-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.487734 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfbnc\" (UniqueName: \"kubernetes.io/projected/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-kube-api-access-hfbnc\") pod \"nova-scheduler-0\" (UID: \"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42\") " pod="openstack/nova-scheduler-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.550799 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b86db77-5206-4395-bdd7-138d53ed65f3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7b86db77-5206-4395-bdd7-138d53ed65f3\") " pod="openstack/nova-metadata-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.550951 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b86db77-5206-4395-bdd7-138d53ed65f3-config-data\") pod \"nova-metadata-0\" (UID: \"7b86db77-5206-4395-bdd7-138d53ed65f3\") " pod="openstack/nova-metadata-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.551028 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b86db77-5206-4395-bdd7-138d53ed65f3-logs\") pod \"nova-metadata-0\" (UID: \"7b86db77-5206-4395-bdd7-138d53ed65f3\") " pod="openstack/nova-metadata-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.551105 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k92lk\" (UniqueName: \"kubernetes.io/projected/7b86db77-5206-4395-bdd7-138d53ed65f3-kube-api-access-k92lk\") pod \"nova-metadata-0\" (UID: \"7b86db77-5206-4395-bdd7-138d53ed65f3\") " pod="openstack/nova-metadata-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.561265 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b86db77-5206-4395-bdd7-138d53ed65f3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7b86db77-5206-4395-bdd7-138d53ed65f3\") " pod="openstack/nova-metadata-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.566521 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b86db77-5206-4395-bdd7-138d53ed65f3-config-data\") pod \"nova-metadata-0\" (UID: \"7b86db77-5206-4395-bdd7-138d53ed65f3\") " pod="openstack/nova-metadata-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.566885 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b86db77-5206-4395-bdd7-138d53ed65f3-logs\") pod \"nova-metadata-0\" (UID: \"7b86db77-5206-4395-bdd7-138d53ed65f3\") " pod="openstack/nova-metadata-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.620074 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k92lk\" (UniqueName: \"kubernetes.io/projected/7b86db77-5206-4395-bdd7-138d53ed65f3-kube-api-access-k92lk\") pod \"nova-metadata-0\" (UID: \"7b86db77-5206-4395-bdd7-138d53ed65f3\") " pod="openstack/nova-metadata-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.685935 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.687754 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.709698 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.721674 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.740163 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.744482 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.750053 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.769404 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.800170 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-566f9f46c9-qqp64"]
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.811834 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-566f9f46c9-qqp64"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.865770 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g89ll\" (UniqueName: \"kubernetes.io/projected/30d79297-b2b1-4eb9-9d7c-97069febc7df-kube-api-access-g89ll\") pod \"nova-cell1-novncproxy-0\" (UID: \"30d79297-b2b1-4eb9-9d7c-97069febc7df\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.865858 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-logs\") pod \"nova-api-0\" (UID: \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\") " pod="openstack/nova-api-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.865921 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nmfv\" (UniqueName: \"kubernetes.io/projected/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-kube-api-access-9nmfv\") pod \"nova-api-0\" (UID: \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\") " pod="openstack/nova-api-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.865975 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30d79297-b2b1-4eb9-9d7c-97069febc7df-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"30d79297-b2b1-4eb9-9d7c-97069febc7df\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.866006 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-config-data\") pod \"nova-api-0\" (UID: \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\") " pod="openstack/nova-api-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.866028 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\") " pod="openstack/nova-api-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.866052 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30d79297-b2b1-4eb9-9d7c-97069febc7df-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"30d79297-b2b1-4eb9-9d7c-97069febc7df\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.892445 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.939728 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.971350 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-ovsdbserver-nb\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.971692 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nmfv\" (UniqueName: \"kubernetes.io/projected/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-kube-api-access-9nmfv\") pod \"nova-api-0\" (UID: \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\") " pod="openstack/nova-api-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.971748 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-dns-swift-storage-0\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.971772 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30d79297-b2b1-4eb9-9d7c-97069febc7df-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"30d79297-b2b1-4eb9-9d7c-97069febc7df\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.971800 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-config-data\") pod \"nova-api-0\" (UID: \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\") " pod="openstack/nova-api-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.971826 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\") " pod="openstack/nova-api-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.971853 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30d79297-b2b1-4eb9-9d7c-97069febc7df-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"30d79297-b2b1-4eb9-9d7c-97069febc7df\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.971901 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h52cw\" (UniqueName: \"kubernetes.io/projected/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-kube-api-access-h52cw\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.971926 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g89ll\" (UniqueName: \"kubernetes.io/projected/30d79297-b2b1-4eb9-9d7c-97069febc7df-kube-api-access-g89ll\") pod \"nova-cell1-novncproxy-0\" (UID: \"30d79297-b2b1-4eb9-9d7c-97069febc7df\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.971942 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-ovsdbserver-sb\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.971993 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-logs\") pod \"nova-api-0\" (UID: \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\") " pod="openstack/nova-api-0"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.972015 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-dns-svc\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.972034 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-config\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64"
Jan 21 17:54:30 crc kubenswrapper[4799]: I0121 17:54:30.972891 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-logs\") pod \"nova-api-0\" (UID: \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\") " pod="openstack/nova-api-0"
Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.000159 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-config-data\") pod \"nova-api-0\" (UID: \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\") " pod="openstack/nova-api-0"
Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.008878 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30d79297-b2b1-4eb9-9d7c-97069febc7df-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"30d79297-b2b1-4eb9-9d7c-97069febc7df\") " pod="openstack/nova-cell1-novncproxy-0"
Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.033098 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30d79297-b2b1-4eb9-9d7c-97069febc7df-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"30d79297-b2b1-4eb9-9d7c-97069febc7df\") " pod="openstack/nova-cell1-novncproxy-0"
\"kubernetes.io/secret/30d79297-b2b1-4eb9-9d7c-97069febc7df-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"30d79297-b2b1-4eb9-9d7c-97069febc7df\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.033234 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\") " pod="openstack/nova-api-0" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.038669 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-566f9f46c9-qqp64"] Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.069854 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nmfv\" (UniqueName: \"kubernetes.io/projected/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-kube-api-access-9nmfv\") pod \"nova-api-0\" (UID: \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\") " pod="openstack/nova-api-0" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.070750 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g89ll\" (UniqueName: \"kubernetes.io/projected/30d79297-b2b1-4eb9-9d7c-97069febc7df-kube-api-access-g89ll\") pod \"nova-cell1-novncproxy-0\" (UID: \"30d79297-b2b1-4eb9-9d7c-97069febc7df\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.084091 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.088609 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h52cw\" (UniqueName: \"kubernetes.io/projected/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-kube-api-access-h52cw\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.088667 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-ovsdbserver-sb\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.088744 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-dns-svc\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.088777 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-config\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.088841 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-ovsdbserver-nb\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " 
pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.088918 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-dns-swift-storage-0\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.089990 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-dns-swift-storage-0\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.092832 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-dns-svc\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.093497 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-ovsdbserver-sb\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.094091 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-config\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.094708 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-ovsdbserver-nb\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.144596 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.155144 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h52cw\" (UniqueName: \"kubernetes.io/projected/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-kube-api-access-h52cw\") pod \"dnsmasq-dns-566f9f46c9-qqp64\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.169706 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.209454 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-p5h5t"] Jan 21 17:54:31 crc kubenswrapper[4799]: W0121 17:54:31.242654 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfbc0e1b8_d099_4a3b_b501_b8486d893927.slice/crio-90dcf45488a7734b7e9f9399bfe800446f97f8808aaee5bc7e126fff59614062 WatchSource:0}: Error finding container 90dcf45488a7734b7e9f9399bfe800446f97f8808aaee5bc7e126fff59614062: Status 404 returned error can't find the container with id 90dcf45488a7734b7e9f9399bfe800446f97f8808aaee5bc7e126fff59614062 Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.339477 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.418064 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.661939 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.881551 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.955262 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-2zbcj"] Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.968388 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-2zbcj"] Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.968490 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-2zbcj" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.980761 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Jan 21 17:54:31 crc kubenswrapper[4799]: I0121 17:54:31.980963 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.006621 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 21 17:54:32 crc kubenswrapper[4799]: W0121 17:54:32.030190 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30d79297_b2b1_4eb9_9d7c_97069febc7df.slice/crio-9531b7836666087e0c91d5c354d8cb75e4a82ae603d574eb0d6a4079b812df9f WatchSource:0}: Error finding container 9531b7836666087e0c91d5c354d8cb75e4a82ae603d574eb0d6a4079b812df9f: Status 404 returned error can't find the container with id 9531b7836666087e0c91d5c354d8cb75e4a82ae603d574eb0d6a4079b812df9f Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.110110 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.118718 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-566f9f46c9-qqp64"] Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.124834 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-2zbcj\" (UID: \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\") " pod="openstack/nova-cell1-conductor-db-sync-2zbcj" Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.125017 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-scripts\") pod \"nova-cell1-conductor-db-sync-2zbcj\" (UID: \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\") " pod="openstack/nova-cell1-conductor-db-sync-2zbcj" Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.125148 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-config-data\") pod \"nova-cell1-conductor-db-sync-2zbcj\" (UID: \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\") " pod="openstack/nova-cell1-conductor-db-sync-2zbcj" Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.125207 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nkbr\" (UniqueName: \"kubernetes.io/projected/fecfb45f-5926-41d7-b7c8-317a1a077eaf-kube-api-access-5nkbr\") pod \"nova-cell1-conductor-db-sync-2zbcj\" (UID: \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\") " pod="openstack/nova-cell1-conductor-db-sync-2zbcj" Jan 21 17:54:32 crc kubenswrapper[4799]: W0121 17:54:32.133844 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcbe752ec_bf40_44cc_b1df_c3d7e2acc01c.slice/crio-a5260133e461b86b6e3056ef07646624676a925afae4d955d37ae123b395ba4d WatchSource:0}: Error finding container a5260133e461b86b6e3056ef07646624676a925afae4d955d37ae123b395ba4d: Status 404 returned error 
Jan 21 17:54:32 crc kubenswrapper[4799]: W0121 17:54:32.138634 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbef4bbc_9610_4354_a9d8_ffc3f7d1bac7.slice/crio-4aa045fdd0cd0ac8560d178c9f95c69ee4f785353616086b4928a0d5e3521675 WatchSource:0}: Error finding container 4aa045fdd0cd0ac8560d178c9f95c69ee4f785353616086b4928a0d5e3521675: Status 404 returned error can't find the container with id 4aa045fdd0cd0ac8560d178c9f95c69ee4f785353616086b4928a0d5e3521675
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.165226 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-p5h5t" event={"ID":"fbc0e1b8-d099-4a3b-b501-b8486d893927","Type":"ContainerStarted","Data":"2aca9b9dd8679a92295ee233b5fb8fcf01609815d98a37a5e078b1def4d1f871"}
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.165706 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-p5h5t" event={"ID":"fbc0e1b8-d099-4a3b-b501-b8486d893927","Type":"ContainerStarted","Data":"90dcf45488a7734b7e9f9399bfe800446f97f8808aaee5bc7e126fff59614062"}
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.172413 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7","Type":"ContainerStarted","Data":"4aa045fdd0cd0ac8560d178c9f95c69ee4f785353616086b4928a0d5e3521675"}
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.176248 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7b86db77-5206-4395-bdd7-138d53ed65f3","Type":"ContainerStarted","Data":"4a2bff98a3295c2fa64169006dd2416465c5df3dd6e834a6ca647cd1a1193029"}
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.178495 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"30d79297-b2b1-4eb9-9d7c-97069febc7df","Type":"ContainerStarted","Data":"9531b7836666087e0c91d5c354d8cb75e4a82ae603d574eb0d6a4079b812df9f"}
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.195336 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42","Type":"ContainerStarted","Data":"2be1f33b22b779fee7812b90945d8d56f2a2935ed30f5b8e5fca0753139d5cec"}
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.197357 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-p5h5t" podStartSLOduration=3.19734271 podStartE2EDuration="3.19734271s" podCreationTimestamp="2026-01-21 17:54:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:54:32.184201573 +0000 UTC m=+1298.810491596" watchObservedRunningTime="2026-01-21 17:54:32.19734271 +0000 UTC m=+1298.823632733"
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.197557 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" event={"ID":"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c","Type":"ContainerStarted","Data":"a5260133e461b86b6e3056ef07646624676a925afae4d955d37ae123b395ba4d"}
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.197894 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.227492 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-2zbcj\" (UID: \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\") " pod="openstack/nova-cell1-conductor-db-sync-2zbcj"
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.227655 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-scripts\") pod \"nova-cell1-conductor-db-sync-2zbcj\" (UID: \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\") " pod="openstack/nova-cell1-conductor-db-sync-2zbcj"
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.227734 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-config-data\") pod \"nova-cell1-conductor-db-sync-2zbcj\" (UID: \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\") " pod="openstack/nova-cell1-conductor-db-sync-2zbcj"
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.227764 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nkbr\" (UniqueName: \"kubernetes.io/projected/fecfb45f-5926-41d7-b7c8-317a1a077eaf-kube-api-access-5nkbr\") pod \"nova-cell1-conductor-db-sync-2zbcj\" (UID: \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\") " pod="openstack/nova-cell1-conductor-db-sync-2zbcj"
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.231851 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-2zbcj\" (UID: \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\") " pod="openstack/nova-cell1-conductor-db-sync-2zbcj"
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.234893 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-config-data\") pod \"nova-cell1-conductor-db-sync-2zbcj\" (UID: \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\") " pod="openstack/nova-cell1-conductor-db-sync-2zbcj"
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.244588 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nkbr\" (UniqueName: \"kubernetes.io/projected/fecfb45f-5926-41d7-b7c8-317a1a077eaf-kube-api-access-5nkbr\") pod \"nova-cell1-conductor-db-sync-2zbcj\" (UID: \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\") " pod="openstack/nova-cell1-conductor-db-sync-2zbcj"
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.247719 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-scripts\") pod \"nova-cell1-conductor-db-sync-2zbcj\" (UID: \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\") " pod="openstack/nova-cell1-conductor-db-sync-2zbcj"
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.254242 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0"
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.310368 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-2zbcj"
Jan 21 17:54:32 crc kubenswrapper[4799]: I0121 17:54:32.941439 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-2zbcj"]
Jan 21 17:54:33 crc kubenswrapper[4799]: I0121 17:54:33.239514 4799 generic.go:334] "Generic (PLEG): container finished" podID="cbe752ec-bf40-44cc-b1df-c3d7e2acc01c" containerID="7e978663da3983a470fa4ce6f971f373759a5d234e2561fc446133dd6f35ab6d" exitCode=0
Jan 21 17:54:33 crc kubenswrapper[4799]: I0121 17:54:33.241277 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" event={"ID":"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c","Type":"ContainerDied","Data":"7e978663da3983a470fa4ce6f971f373759a5d234e2561fc446133dd6f35ab6d"}
Jan 21 17:54:34 crc kubenswrapper[4799]: I0121 17:54:34.560552 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 21 17:54:34 crc kubenswrapper[4799]: I0121 17:54:34.619266 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 21 17:54:35 crc kubenswrapper[4799]: W0121 17:54:35.549258 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfecfb45f_5926_41d7_b7c8_317a1a077eaf.slice/crio-d99fb0e90cbfcf9e7205bb0c49cadf667fce8af001b79505003e050e5dd491b5 WatchSource:0}: Error finding container d99fb0e90cbfcf9e7205bb0c49cadf667fce8af001b79505003e050e5dd491b5: Status 404 returned error can't find the container with id d99fb0e90cbfcf9e7205bb0c49cadf667fce8af001b79505003e050e5dd491b5
Jan 21 17:54:36 crc kubenswrapper[4799]: I0121 17:54:36.346429 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" event={"ID":"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c","Type":"ContainerStarted","Data":"6d4a59a84df18cc8dddecc6170657feb25ce09ebb8106c817a62db9e46a34a40"}
Jan 21 17:54:36 crc kubenswrapper[4799]: I0121 17:54:36.347100 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-566f9f46c9-qqp64"
Jan 21 17:54:36 crc kubenswrapper[4799]: I0121 17:54:36.362818 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42","Type":"ContainerStarted","Data":"ddf9e459e108f38042f0e6ad5edd9f3266b75357b97daa53ab9d90a5bd54a08d"}
Jan 21 17:54:36 crc kubenswrapper[4799]: I0121 17:54:36.365690 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Jan 21 17:54:36 crc kubenswrapper[4799]: I0121 17:54:36.371507 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" podStartSLOduration=6.371488841 podStartE2EDuration="6.371488841s" podCreationTimestamp="2026-01-21 17:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:54:36.370201545 +0000 UTC m=+1302.996491588" watchObservedRunningTime="2026-01-21 17:54:36.371488841 +0000 UTC m=+1302.997778864"
Jan 21 17:54:36 crc kubenswrapper[4799]: I0121 17:54:36.387644 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-2zbcj" event={"ID":"fecfb45f-5926-41d7-b7c8-317a1a077eaf","Type":"ContainerStarted","Data":"d0e5986d3cb474c2f382f374d9b4eb4d38cb9a20691efe800702ae5913597568"}
Jan 21 17:54:36 crc kubenswrapper[4799]: I0121 17:54:36.387700 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-2zbcj" event={"ID":"fecfb45f-5926-41d7-b7c8-317a1a077eaf","Type":"ContainerStarted","Data":"d99fb0e90cbfcf9e7205bb0c49cadf667fce8af001b79505003e050e5dd491b5"}
Jan 21 17:54:36 crc kubenswrapper[4799]: I0121 17:54:36.396258 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7b86db77-5206-4395-bdd7-138d53ed65f3","Type":"ContainerStarted","Data":"994892a8933012dc8e600e8f543720e21e7807602417f82a6026f07368be323a"}
Jan 21 17:54:36 crc kubenswrapper[4799]: I0121 17:54:36.405581 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.323275288 podStartE2EDuration="6.405555547s" podCreationTimestamp="2026-01-21 17:54:30 +0000 UTC" firstStartedPulling="2026-01-21 17:54:31.699026892 +0000 UTC m=+1298.325316905" lastFinishedPulling="2026-01-21 17:54:35.781307141 +0000 UTC m=+1302.407597164" observedRunningTime="2026-01-21 17:54:36.401176034 +0000 UTC m=+1303.027466067" watchObservedRunningTime="2026-01-21 17:54:36.405555547 +0000 UTC m=+1303.031845570"
Jan 21 17:54:36 crc kubenswrapper[4799]: I0121 17:54:36.410995 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"30d79297-b2b1-4eb9-9d7c-97069febc7df","Type":"ContainerStarted","Data":"66ee79073f4cc6dc8b1141b00917e5bcc81b3041d66a30099086991f7898436d"}
Jan 21 17:54:36 crc kubenswrapper[4799]: I0121 17:54:36.411202 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="30d79297-b2b1-4eb9-9d7c-97069febc7df" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://66ee79073f4cc6dc8b1141b00917e5bcc81b3041d66a30099086991f7898436d" gracePeriod=30
Jan 21 17:54:36 crc kubenswrapper[4799]: I0121 17:54:36.446648 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-2zbcj" podStartSLOduration=5.4466291479999995 podStartE2EDuration="5.446629148s" podCreationTimestamp="2026-01-21 17:54:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:54:36.441488754 +0000 UTC m=+1303.067778767" watchObservedRunningTime="2026-01-21 17:54:36.446629148 +0000 UTC m=+1303.072919161"
Jan 21 17:54:36 crc kubenswrapper[4799]: I0121 17:54:36.469080 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.730393279 podStartE2EDuration="6.469056717s" podCreationTimestamp="2026-01-21 17:54:30 +0000 UTC" firstStartedPulling="2026-01-21 17:54:32.043420195 +0000 UTC m=+1298.669710218" lastFinishedPulling="2026-01-21 17:54:35.782083623 +0000 UTC m=+1302.408373656" observedRunningTime="2026-01-21 17:54:36.465843197 +0000 UTC m=+1303.092133220" watchObservedRunningTime="2026-01-21 17:54:36.469056717 +0000 UTC m=+1303.095346740"
Jan 21 17:54:37 crc kubenswrapper[4799]: I0121 17:54:37.614586 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7","Type":"ContainerStarted","Data":"6347cdde9c8946f21784132a3edbfe07824260e19222d691498038bbf1ef839f"}
Jan 21 17:54:37 crc kubenswrapper[4799]: I0121 17:54:37.615379 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7","Type":"ContainerStarted","Data":"8753d0ab3858b42e85079b4b8baf2224acfa8af0c1b7d3dd1d7be2a23ea2d9a3"}
event={"ID":"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7","Type":"ContainerStarted","Data":"8753d0ab3858b42e85079b4b8baf2224acfa8af0c1b7d3dd1d7be2a23ea2d9a3"} Jan 21 17:54:37 crc kubenswrapper[4799]: I0121 17:54:37.629484 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7b86db77-5206-4395-bdd7-138d53ed65f3" containerName="nova-metadata-log" containerID="cri-o://994892a8933012dc8e600e8f543720e21e7807602417f82a6026f07368be323a" gracePeriod=30 Jan 21 17:54:37 crc kubenswrapper[4799]: I0121 17:54:37.629894 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7b86db77-5206-4395-bdd7-138d53ed65f3","Type":"ContainerStarted","Data":"4954992645c4da1ada3aff540dbf8e479e66b71c12fcf3421dc6fe476aa80599"} Jan 21 17:54:37 crc kubenswrapper[4799]: I0121 17:54:37.630563 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7b86db77-5206-4395-bdd7-138d53ed65f3" containerName="nova-metadata-metadata" containerID="cri-o://4954992645c4da1ada3aff540dbf8e479e66b71c12fcf3421dc6fe476aa80599" gracePeriod=30 Jan 21 17:54:37 crc kubenswrapper[4799]: I0121 17:54:37.667858 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=4.03089542 podStartE2EDuration="7.667832723s" podCreationTimestamp="2026-01-21 17:54:30 +0000 UTC" firstStartedPulling="2026-01-21 17:54:32.144355898 +0000 UTC m=+1298.770645921" lastFinishedPulling="2026-01-21 17:54:35.781293201 +0000 UTC m=+1302.407583224" observedRunningTime="2026-01-21 17:54:37.643712036 +0000 UTC m=+1304.270002079" watchObservedRunningTime="2026-01-21 17:54:37.667832723 +0000 UTC m=+1304.294122746" Jan 21 17:54:37 crc kubenswrapper[4799]: I0121 17:54:37.680300 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.794317544 podStartE2EDuration="7.680276532s" podCreationTimestamp="2026-01-21 17:54:30 +0000 UTC" firstStartedPulling="2026-01-21 17:54:31.895433936 +0000 UTC m=+1298.521723959" lastFinishedPulling="2026-01-21 17:54:35.781392924 +0000 UTC m=+1302.407682947" observedRunningTime="2026-01-21 17:54:37.663497221 +0000 UTC m=+1304.289787244" watchObservedRunningTime="2026-01-21 17:54:37.680276532 +0000 UTC m=+1304.306566555" Jan 21 17:54:38 crc kubenswrapper[4799]: I0121 17:54:38.643362 4799 generic.go:334] "Generic (PLEG): container finished" podID="7b86db77-5206-4395-bdd7-138d53ed65f3" containerID="4954992645c4da1ada3aff540dbf8e479e66b71c12fcf3421dc6fe476aa80599" exitCode=0 Jan 21 17:54:38 crc kubenswrapper[4799]: I0121 17:54:38.643626 4799 generic.go:334] "Generic (PLEG): container finished" podID="7b86db77-5206-4395-bdd7-138d53ed65f3" containerID="994892a8933012dc8e600e8f543720e21e7807602417f82a6026f07368be323a" exitCode=143 Jan 21 17:54:38 crc kubenswrapper[4799]: I0121 17:54:38.644255 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7b86db77-5206-4395-bdd7-138d53ed65f3","Type":"ContainerDied","Data":"4954992645c4da1ada3aff540dbf8e479e66b71c12fcf3421dc6fe476aa80599"} Jan 21 17:54:38 crc kubenswrapper[4799]: I0121 17:54:38.644282 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7b86db77-5206-4395-bdd7-138d53ed65f3","Type":"ContainerDied","Data":"994892a8933012dc8e600e8f543720e21e7807602417f82a6026f07368be323a"} Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 
17:54:39.139396 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.255633 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k92lk\" (UniqueName: \"kubernetes.io/projected/7b86db77-5206-4395-bdd7-138d53ed65f3-kube-api-access-k92lk\") pod \"7b86db77-5206-4395-bdd7-138d53ed65f3\" (UID: \"7b86db77-5206-4395-bdd7-138d53ed65f3\") " Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.258359 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b86db77-5206-4395-bdd7-138d53ed65f3-config-data\") pod \"7b86db77-5206-4395-bdd7-138d53ed65f3\" (UID: \"7b86db77-5206-4395-bdd7-138d53ed65f3\") " Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.258586 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b86db77-5206-4395-bdd7-138d53ed65f3-logs\") pod \"7b86db77-5206-4395-bdd7-138d53ed65f3\" (UID: \"7b86db77-5206-4395-bdd7-138d53ed65f3\") " Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.258764 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b86db77-5206-4395-bdd7-138d53ed65f3-combined-ca-bundle\") pod \"7b86db77-5206-4395-bdd7-138d53ed65f3\" (UID: \"7b86db77-5206-4395-bdd7-138d53ed65f3\") " Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.262781 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b86db77-5206-4395-bdd7-138d53ed65f3-kube-api-access-k92lk" (OuterVolumeSpecName: "kube-api-access-k92lk") pod "7b86db77-5206-4395-bdd7-138d53ed65f3" (UID: "7b86db77-5206-4395-bdd7-138d53ed65f3"). InnerVolumeSpecName "kube-api-access-k92lk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.271835 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b86db77-5206-4395-bdd7-138d53ed65f3-logs" (OuterVolumeSpecName: "logs") pod "7b86db77-5206-4395-bdd7-138d53ed65f3" (UID: "7b86db77-5206-4395-bdd7-138d53ed65f3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.293332 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b86db77-5206-4395-bdd7-138d53ed65f3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b86db77-5206-4395-bdd7-138d53ed65f3" (UID: "7b86db77-5206-4395-bdd7-138d53ed65f3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.302430 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b86db77-5206-4395-bdd7-138d53ed65f3-config-data" (OuterVolumeSpecName: "config-data") pod "7b86db77-5206-4395-bdd7-138d53ed65f3" (UID: "7b86db77-5206-4395-bdd7-138d53ed65f3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.361077 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k92lk\" (UniqueName: \"kubernetes.io/projected/7b86db77-5206-4395-bdd7-138d53ed65f3-kube-api-access-k92lk\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.361138 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b86db77-5206-4395-bdd7-138d53ed65f3-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.361151 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b86db77-5206-4395-bdd7-138d53ed65f3-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.361160 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b86db77-5206-4395-bdd7-138d53ed65f3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.660824 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7b86db77-5206-4395-bdd7-138d53ed65f3","Type":"ContainerDied","Data":"4a2bff98a3295c2fa64169006dd2416465c5df3dd6e834a6ca647cd1a1193029"} Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.661195 4799 scope.go:117] "RemoveContainer" containerID="4954992645c4da1ada3aff540dbf8e479e66b71c12fcf3421dc6fe476aa80599" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.660928 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.689721 4799 scope.go:117] "RemoveContainer" containerID="994892a8933012dc8e600e8f543720e21e7807602417f82a6026f07368be323a" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.717150 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.735195 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.748941 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 21 17:54:39 crc kubenswrapper[4799]: E0121 17:54:39.749526 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b86db77-5206-4395-bdd7-138d53ed65f3" containerName="nova-metadata-log" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.749554 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b86db77-5206-4395-bdd7-138d53ed65f3" containerName="nova-metadata-log" Jan 21 17:54:39 crc kubenswrapper[4799]: E0121 17:54:39.749608 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b86db77-5206-4395-bdd7-138d53ed65f3" containerName="nova-metadata-metadata" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.749620 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b86db77-5206-4395-bdd7-138d53ed65f3" containerName="nova-metadata-metadata" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.749921 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b86db77-5206-4395-bdd7-138d53ed65f3" containerName="nova-metadata-log" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.749962 4799 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="7b86db77-5206-4395-bdd7-138d53ed65f3" containerName="nova-metadata-metadata" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.751509 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.754069 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.757001 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.757433 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.770076 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.770139 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.770187 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99600d96-195c-4b05-b7e4-3fd33763e869-logs\") pod \"nova-metadata-0\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.770235 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-config-data\") pod \"nova-metadata-0\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.770322 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5t5p2\" (UniqueName: \"kubernetes.io/projected/99600d96-195c-4b05-b7e4-3fd33763e869-kube-api-access-5t5p2\") pod \"nova-metadata-0\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.872736 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5t5p2\" (UniqueName: \"kubernetes.io/projected/99600d96-195c-4b05-b7e4-3fd33763e869-kube-api-access-5t5p2\") pod \"nova-metadata-0\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.872852 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.872876 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.872925 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99600d96-195c-4b05-b7e4-3fd33763e869-logs\") pod \"nova-metadata-0\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.872957 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-config-data\") pod \"nova-metadata-0\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.873597 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99600d96-195c-4b05-b7e4-3fd33763e869-logs\") pod \"nova-metadata-0\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.877817 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.878215 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.882861 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-config-data\") pod \"nova-metadata-0\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " pod="openstack/nova-metadata-0" Jan 21 17:54:39 crc kubenswrapper[4799]: I0121 17:54:39.890694 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5t5p2\" (UniqueName: \"kubernetes.io/projected/99600d96-195c-4b05-b7e4-3fd33763e869-kube-api-access-5t5p2\") pod \"nova-metadata-0\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " pod="openstack/nova-metadata-0" Jan 21 17:54:40 crc kubenswrapper[4799]: I0121 17:54:40.110456 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 21 17:54:40 crc kubenswrapper[4799]: I0121 17:54:40.289146 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b86db77-5206-4395-bdd7-138d53ed65f3" path="/var/lib/kubelet/pods/7b86db77-5206-4395-bdd7-138d53ed65f3/volumes" Jan 21 17:54:40 crc kubenswrapper[4799]: I0121 17:54:40.596245 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 21 17:54:40 crc kubenswrapper[4799]: W0121 17:54:40.611724 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99600d96_195c_4b05_b7e4_3fd33763e869.slice/crio-e47198c859e05da0ce8d27fbf4c7675d4358d40541b28dc2450c051dc4ddbd52 WatchSource:0}: Error finding container e47198c859e05da0ce8d27fbf4c7675d4358d40541b28dc2450c051dc4ddbd52: Status 404 returned error can't find the container with id e47198c859e05da0ce8d27fbf4c7675d4358d40541b28dc2450c051dc4ddbd52 Jan 21 17:54:40 crc kubenswrapper[4799]: I0121 17:54:40.680591 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"99600d96-195c-4b05-b7e4-3fd33763e869","Type":"ContainerStarted","Data":"e47198c859e05da0ce8d27fbf4c7675d4358d40541b28dc2450c051dc4ddbd52"} Jan 21 17:54:40 crc kubenswrapper[4799]: I0121 17:54:40.723507 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 21 17:54:40 crc kubenswrapper[4799]: I0121 17:54:40.723562 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 21 17:54:40 crc kubenswrapper[4799]: I0121 17:54:40.758359 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 21 17:54:41 crc kubenswrapper[4799]: I0121 17:54:41.088966 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:54:41 crc kubenswrapper[4799]: I0121 17:54:41.145183 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 21 17:54:41 crc kubenswrapper[4799]: I0121 17:54:41.145270 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 21 17:54:41 crc kubenswrapper[4799]: I0121 17:54:41.172410 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" Jan 21 17:54:41 crc kubenswrapper[4799]: I0121 17:54:41.250032 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79d9d747c5-mfvl5"] Jan 21 17:54:41 crc kubenswrapper[4799]: I0121 17:54:41.693624 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5" podUID="ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876" containerName="dnsmasq-dns" containerID="cri-o://a6ec660917db9b44d4f79ddd35bb42d9da4733377e7e8c758088010a22144c80" gracePeriod=10 Jan 21 17:54:41 crc kubenswrapper[4799]: I0121 17:54:41.694809 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"99600d96-195c-4b05-b7e4-3fd33763e869","Type":"ContainerStarted","Data":"e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6"} Jan 21 17:54:41 crc kubenswrapper[4799]: I0121 17:54:41.694883 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"99600d96-195c-4b05-b7e4-3fd33763e869","Type":"ContainerStarted","Data":"15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96"} Jan 21 17:54:41 crc kubenswrapper[4799]: I0121 17:54:41.732700 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.732675849 podStartE2EDuration="2.732675849s" podCreationTimestamp="2026-01-21 17:54:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:54:41.726515907 +0000 UTC m=+1308.352805930" watchObservedRunningTime="2026-01-21 17:54:41.732675849 +0000 UTC m=+1308.358965872" Jan 21 17:54:41 crc kubenswrapper[4799]: I0121 17:54:41.739713 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.228626 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.206:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.228780 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.206:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.286207 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.356017 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-dns-swift-storage-0\") pod \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.356171 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-ovsdbserver-sb\") pod \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.356210 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-config\") pod \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.356309 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-ovsdbserver-nb\") pod \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.356419 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhjl5\" (UniqueName: \"kubernetes.io/projected/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-kube-api-access-nhjl5\") pod \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\" (UID: 
\"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.356474 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-dns-svc\") pod \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\" (UID: \"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876\") " Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.367369 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-kube-api-access-nhjl5" (OuterVolumeSpecName: "kube-api-access-nhjl5") pod "ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876" (UID: "ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876"). InnerVolumeSpecName "kube-api-access-nhjl5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.436839 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876" (UID: "ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.452391 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-config" (OuterVolumeSpecName: "config") pod "ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876" (UID: "ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.459162 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.459198 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.459209 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhjl5\" (UniqueName: \"kubernetes.io/projected/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-kube-api-access-nhjl5\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.486191 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.486557 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="59e07a31-b75b-4e5b-827f-8ce5617a3810" containerName="kube-state-metrics" containerID="cri-o://dc252ed2b77d1a62a0ddc449129ad03216084a08aaf5803006b7b737fe9cd43c" gracePeriod=30 Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.504899 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876" (UID: "ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.515530 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876" (UID: "ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.518942 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876" (UID: "ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.561347 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.561394 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.561409 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.714563 4799 generic.go:334] "Generic (PLEG): container finished" podID="ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876" containerID="a6ec660917db9b44d4f79ddd35bb42d9da4733377e7e8c758088010a22144c80" exitCode=0 Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.714631 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.714655 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5" event={"ID":"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876","Type":"ContainerDied","Data":"a6ec660917db9b44d4f79ddd35bb42d9da4733377e7e8c758088010a22144c80"} Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.714686 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79d9d747c5-mfvl5" event={"ID":"ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876","Type":"ContainerDied","Data":"be5f8bf7ca05c05eda2040f5f939e24576b2fb0173e76a56f8758f77bd57c049"} Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.714702 4799 scope.go:117] "RemoveContainer" containerID="a6ec660917db9b44d4f79ddd35bb42d9da4733377e7e8c758088010a22144c80" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.719378 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-p5h5t" event={"ID":"fbc0e1b8-d099-4a3b-b501-b8486d893927","Type":"ContainerDied","Data":"2aca9b9dd8679a92295ee233b5fb8fcf01609815d98a37a5e078b1def4d1f871"} Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.719389 4799 generic.go:334] "Generic (PLEG): container finished" podID="fbc0e1b8-d099-4a3b-b501-b8486d893927" containerID="2aca9b9dd8679a92295ee233b5fb8fcf01609815d98a37a5e078b1def4d1f871" exitCode=0 Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.733901 4799 generic.go:334] "Generic (PLEG): container finished" podID="59e07a31-b75b-4e5b-827f-8ce5617a3810" containerID="dc252ed2b77d1a62a0ddc449129ad03216084a08aaf5803006b7b737fe9cd43c" exitCode=2 Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.734802 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"59e07a31-b75b-4e5b-827f-8ce5617a3810","Type":"ContainerDied","Data":"dc252ed2b77d1a62a0ddc449129ad03216084a08aaf5803006b7b737fe9cd43c"} Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.786079 4799 scope.go:117] "RemoveContainer" containerID="af370e696c7ec23904c76cb908a8553543f4b976b832196b41e2d1c4736ff2e4" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.790449 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79d9d747c5-mfvl5"] Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.800109 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-79d9d747c5-mfvl5"] Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.848598 4799 scope.go:117] "RemoveContainer" containerID="a6ec660917db9b44d4f79ddd35bb42d9da4733377e7e8c758088010a22144c80" Jan 21 17:54:42 crc kubenswrapper[4799]: E0121 17:54:42.855265 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6ec660917db9b44d4f79ddd35bb42d9da4733377e7e8c758088010a22144c80\": container with ID starting with a6ec660917db9b44d4f79ddd35bb42d9da4733377e7e8c758088010a22144c80 not found: ID does not exist" containerID="a6ec660917db9b44d4f79ddd35bb42d9da4733377e7e8c758088010a22144c80" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.855320 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6ec660917db9b44d4f79ddd35bb42d9da4733377e7e8c758088010a22144c80"} err="failed to get container status \"a6ec660917db9b44d4f79ddd35bb42d9da4733377e7e8c758088010a22144c80\": rpc error: code = NotFound desc = could not find container 
\"a6ec660917db9b44d4f79ddd35bb42d9da4733377e7e8c758088010a22144c80\": container with ID starting with a6ec660917db9b44d4f79ddd35bb42d9da4733377e7e8c758088010a22144c80 not found: ID does not exist" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.855352 4799 scope.go:117] "RemoveContainer" containerID="af370e696c7ec23904c76cb908a8553543f4b976b832196b41e2d1c4736ff2e4" Jan 21 17:54:42 crc kubenswrapper[4799]: E0121 17:54:42.863295 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af370e696c7ec23904c76cb908a8553543f4b976b832196b41e2d1c4736ff2e4\": container with ID starting with af370e696c7ec23904c76cb908a8553543f4b976b832196b41e2d1c4736ff2e4 not found: ID does not exist" containerID="af370e696c7ec23904c76cb908a8553543f4b976b832196b41e2d1c4736ff2e4" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.863334 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af370e696c7ec23904c76cb908a8553543f4b976b832196b41e2d1c4736ff2e4"} err="failed to get container status \"af370e696c7ec23904c76cb908a8553543f4b976b832196b41e2d1c4736ff2e4\": rpc error: code = NotFound desc = could not find container \"af370e696c7ec23904c76cb908a8553543f4b976b832196b41e2d1c4736ff2e4\": container with ID starting with af370e696c7ec23904c76cb908a8553543f4b976b832196b41e2d1c4736ff2e4 not found: ID does not exist" Jan 21 17:54:42 crc kubenswrapper[4799]: I0121 17:54:42.984776 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.073840 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bj95x\" (UniqueName: \"kubernetes.io/projected/59e07a31-b75b-4e5b-827f-8ce5617a3810-kube-api-access-bj95x\") pod \"59e07a31-b75b-4e5b-827f-8ce5617a3810\" (UID: \"59e07a31-b75b-4e5b-827f-8ce5617a3810\") " Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.079636 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59e07a31-b75b-4e5b-827f-8ce5617a3810-kube-api-access-bj95x" (OuterVolumeSpecName: "kube-api-access-bj95x") pod "59e07a31-b75b-4e5b-827f-8ce5617a3810" (UID: "59e07a31-b75b-4e5b-827f-8ce5617a3810"). InnerVolumeSpecName "kube-api-access-bj95x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.176326 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bj95x\" (UniqueName: \"kubernetes.io/projected/59e07a31-b75b-4e5b-827f-8ce5617a3810-kube-api-access-bj95x\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.746965 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.746969 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"59e07a31-b75b-4e5b-827f-8ce5617a3810","Type":"ContainerDied","Data":"9253eab859c8ffdc2c89d84dec239b80c07145d6df979aa6b65594e58b50f5b8"} Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.747071 4799 scope.go:117] "RemoveContainer" containerID="dc252ed2b77d1a62a0ddc449129ad03216084a08aaf5803006b7b737fe9cd43c" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.827952 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.845212 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.855588 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jan 21 17:54:43 crc kubenswrapper[4799]: E0121 17:54:43.856028 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59e07a31-b75b-4e5b-827f-8ce5617a3810" containerName="kube-state-metrics" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.856040 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="59e07a31-b75b-4e5b-827f-8ce5617a3810" containerName="kube-state-metrics" Jan 21 17:54:43 crc kubenswrapper[4799]: E0121 17:54:43.856055 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876" containerName="init" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.856060 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876" containerName="init" Jan 21 17:54:43 crc kubenswrapper[4799]: E0121 17:54:43.856080 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876" containerName="dnsmasq-dns" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.856087 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876" containerName="dnsmasq-dns" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.856301 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="59e07a31-b75b-4e5b-827f-8ce5617a3810" containerName="kube-state-metrics" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.856335 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876" containerName="dnsmasq-dns" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.857081 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.884514 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.884736 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.893788 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.902837 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bhl5\" (UniqueName: \"kubernetes.io/projected/dab8cf8d-7956-4cfb-a107-0e15661fc5f7-kube-api-access-8bhl5\") pod \"kube-state-metrics-0\" (UID: \"dab8cf8d-7956-4cfb-a107-0e15661fc5f7\") " pod="openstack/kube-state-metrics-0" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.902926 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/dab8cf8d-7956-4cfb-a107-0e15661fc5f7-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"dab8cf8d-7956-4cfb-a107-0e15661fc5f7\") " pod="openstack/kube-state-metrics-0" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.902982 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dab8cf8d-7956-4cfb-a107-0e15661fc5f7-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"dab8cf8d-7956-4cfb-a107-0e15661fc5f7\") " pod="openstack/kube-state-metrics-0" Jan 21 17:54:43 crc kubenswrapper[4799]: I0121 17:54:43.903058 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/dab8cf8d-7956-4cfb-a107-0e15661fc5f7-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"dab8cf8d-7956-4cfb-a107-0e15661fc5f7\") " pod="openstack/kube-state-metrics-0" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.017403 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/dab8cf8d-7956-4cfb-a107-0e15661fc5f7-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"dab8cf8d-7956-4cfb-a107-0e15661fc5f7\") " pod="openstack/kube-state-metrics-0" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.017531 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bhl5\" (UniqueName: \"kubernetes.io/projected/dab8cf8d-7956-4cfb-a107-0e15661fc5f7-kube-api-access-8bhl5\") pod \"kube-state-metrics-0\" (UID: \"dab8cf8d-7956-4cfb-a107-0e15661fc5f7\") " pod="openstack/kube-state-metrics-0" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.017576 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/dab8cf8d-7956-4cfb-a107-0e15661fc5f7-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"dab8cf8d-7956-4cfb-a107-0e15661fc5f7\") " pod="openstack/kube-state-metrics-0" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.017615 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/dab8cf8d-7956-4cfb-a107-0e15661fc5f7-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"dab8cf8d-7956-4cfb-a107-0e15661fc5f7\") " pod="openstack/kube-state-metrics-0" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.048786 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/dab8cf8d-7956-4cfb-a107-0e15661fc5f7-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"dab8cf8d-7956-4cfb-a107-0e15661fc5f7\") " pod="openstack/kube-state-metrics-0" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.049269 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/dab8cf8d-7956-4cfb-a107-0e15661fc5f7-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"dab8cf8d-7956-4cfb-a107-0e15661fc5f7\") " pod="openstack/kube-state-metrics-0" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.059934 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bhl5\" (UniqueName: \"kubernetes.io/projected/dab8cf8d-7956-4cfb-a107-0e15661fc5f7-kube-api-access-8bhl5\") pod \"kube-state-metrics-0\" (UID: \"dab8cf8d-7956-4cfb-a107-0e15661fc5f7\") " pod="openstack/kube-state-metrics-0" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.071367 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dab8cf8d-7956-4cfb-a107-0e15661fc5f7-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"dab8cf8d-7956-4cfb-a107-0e15661fc5f7\") " pod="openstack/kube-state-metrics-0" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.216284 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.243996 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59e07a31-b75b-4e5b-827f-8ce5617a3810" path="/var/lib/kubelet/pods/59e07a31-b75b-4e5b-827f-8ce5617a3810/volumes" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.246536 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876" path="/var/lib/kubelet/pods/ecbae69f-ceb2-4b4e-8b62-ae7ef4bd1876/volumes" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.353663 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-p5h5t" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.437843 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-combined-ca-bundle\") pod \"fbc0e1b8-d099-4a3b-b501-b8486d893927\" (UID: \"fbc0e1b8-d099-4a3b-b501-b8486d893927\") " Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.437903 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-scripts\") pod \"fbc0e1b8-d099-4a3b-b501-b8486d893927\" (UID: \"fbc0e1b8-d099-4a3b-b501-b8486d893927\") " Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.438047 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-config-data\") pod \"fbc0e1b8-d099-4a3b-b501-b8486d893927\" (UID: \"fbc0e1b8-d099-4a3b-b501-b8486d893927\") " Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.438109 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfs7c\" (UniqueName: \"kubernetes.io/projected/fbc0e1b8-d099-4a3b-b501-b8486d893927-kube-api-access-tfs7c\") pod \"fbc0e1b8-d099-4a3b-b501-b8486d893927\" (UID: \"fbc0e1b8-d099-4a3b-b501-b8486d893927\") " Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.446307 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-scripts" (OuterVolumeSpecName: "scripts") pod "fbc0e1b8-d099-4a3b-b501-b8486d893927" (UID: "fbc0e1b8-d099-4a3b-b501-b8486d893927"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.467690 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbc0e1b8-d099-4a3b-b501-b8486d893927-kube-api-access-tfs7c" (OuterVolumeSpecName: "kube-api-access-tfs7c") pod "fbc0e1b8-d099-4a3b-b501-b8486d893927" (UID: "fbc0e1b8-d099-4a3b-b501-b8486d893927"). InnerVolumeSpecName "kube-api-access-tfs7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.479935 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fbc0e1b8-d099-4a3b-b501-b8486d893927" (UID: "fbc0e1b8-d099-4a3b-b501-b8486d893927"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.480226 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-config-data" (OuterVolumeSpecName: "config-data") pod "fbc0e1b8-d099-4a3b-b501-b8486d893927" (UID: "fbc0e1b8-d099-4a3b-b501-b8486d893927"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.541649 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.541683 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.541692 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbc0e1b8-d099-4a3b-b501-b8486d893927-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.541701 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfs7c\" (UniqueName: \"kubernetes.io/projected/fbc0e1b8-d099-4a3b-b501-b8486d893927-kube-api-access-tfs7c\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.732401 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.757448 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-p5h5t" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.757441 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-p5h5t" event={"ID":"fbc0e1b8-d099-4a3b-b501-b8486d893927","Type":"ContainerDied","Data":"90dcf45488a7734b7e9f9399bfe800446f97f8808aaee5bc7e126fff59614062"} Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.757591 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="90dcf45488a7734b7e9f9399bfe800446f97f8808aaee5bc7e126fff59614062" Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.759844 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"dab8cf8d-7956-4cfb-a107-0e15661fc5f7","Type":"ContainerStarted","Data":"de739919b1a4fe9b2cccac690ed1d4e3eaf192c3b04c4452631deb43e0994899"} Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.950328 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.950915 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="1ed97e8f-9f52-4b26-8f21-b475f4f4ca42" containerName="nova-scheduler-scheduler" containerID="cri-o://ddf9e459e108f38042f0e6ad5edd9f3266b75357b97daa53ab9d90a5bd54a08d" gracePeriod=30 Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.967828 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.968175 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" containerName="nova-api-log" containerID="cri-o://8753d0ab3858b42e85079b4b8baf2224acfa8af0c1b7d3dd1d7be2a23ea2d9a3" gracePeriod=30 Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.968357 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" containerName="nova-api-api" 
containerID="cri-o://6347cdde9c8946f21784132a3edbfe07824260e19222d691498038bbf1ef839f" gracePeriod=30 Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.983795 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.984070 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="99600d96-195c-4b05-b7e4-3fd33763e869" containerName="nova-metadata-log" containerID="cri-o://15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96" gracePeriod=30 Jan 21 17:54:44 crc kubenswrapper[4799]: I0121 17:54:44.984141 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="99600d96-195c-4b05-b7e4-3fd33763e869" containerName="nova-metadata-metadata" containerID="cri-o://e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6" gracePeriod=30 Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.110817 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.110908 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.304001 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.308479 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerName="ceilometer-notification-agent" containerID="cri-o://6453eb4470b30ca2611cd25fdb7a87d5dc976e5d68a2e62bc39ef8a4d8b2b4ab" gracePeriod=30 Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.308481 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerName="sg-core" containerID="cri-o://b2eb0c516a58245631231bc722aeeeda844bf91add33e0cdb51e488991bd4597" gracePeriod=30 Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.308528 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerName="proxy-httpd" containerID="cri-o://9830c615ed08576c95a23a692cd33245b4618cf25aee13d57428205ece62c9da" gracePeriod=30 Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.308428 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerName="ceilometer-central-agent" containerID="cri-o://995a0eae055fbdc6641b75a43b7ed72ce31134fc842d6f302138e2cb6e355b25" gracePeriod=30 Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.654283 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 21 17:54:45 crc kubenswrapper[4799]: E0121 17:54:45.725568 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ddf9e459e108f38042f0e6ad5edd9f3266b75357b97daa53ab9d90a5bd54a08d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 21 17:54:45 crc kubenswrapper[4799]: E0121 17:54:45.726916 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ddf9e459e108f38042f0e6ad5edd9f3266b75357b97daa53ab9d90a5bd54a08d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 21 17:54:45 crc kubenswrapper[4799]: E0121 17:54:45.731284 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ddf9e459e108f38042f0e6ad5edd9f3266b75357b97daa53ab9d90a5bd54a08d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 21 17:54:45 crc kubenswrapper[4799]: E0121 17:54:45.731385 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="1ed97e8f-9f52-4b26-8f21-b475f4f4ca42" containerName="nova-scheduler-scheduler" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.799195 4799 generic.go:334] "Generic (PLEG): container finished" podID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerID="9830c615ed08576c95a23a692cd33245b4618cf25aee13d57428205ece62c9da" exitCode=0 Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.799252 4799 generic.go:334] "Generic (PLEG): container finished" podID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerID="b2eb0c516a58245631231bc722aeeeda844bf91add33e0cdb51e488991bd4597" exitCode=2 Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.799330 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d71d15bb-8612-40d5-b21a-5fe51f6c95d5","Type":"ContainerDied","Data":"9830c615ed08576c95a23a692cd33245b4618cf25aee13d57428205ece62c9da"} Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.799360 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d71d15bb-8612-40d5-b21a-5fe51f6c95d5","Type":"ContainerDied","Data":"b2eb0c516a58245631231bc722aeeeda844bf91add33e0cdb51e488991bd4597"} Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.805162 4799 generic.go:334] "Generic (PLEG): container finished" podID="bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" containerID="8753d0ab3858b42e85079b4b8baf2224acfa8af0c1b7d3dd1d7be2a23ea2d9a3" exitCode=143 Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.805368 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7","Type":"ContainerDied","Data":"8753d0ab3858b42e85079b4b8baf2224acfa8af0c1b7d3dd1d7be2a23ea2d9a3"} Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.808380 4799 generic.go:334] "Generic (PLEG): container finished" podID="99600d96-195c-4b05-b7e4-3fd33763e869" containerID="e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6" exitCode=0 Jan 21 17:54:45 crc 
kubenswrapper[4799]: I0121 17:54:45.808442 4799 generic.go:334] "Generic (PLEG): container finished" podID="99600d96-195c-4b05-b7e4-3fd33763e869" containerID="15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96" exitCode=143 Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.808475 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"99600d96-195c-4b05-b7e4-3fd33763e869","Type":"ContainerDied","Data":"e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6"} Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.808509 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"99600d96-195c-4b05-b7e4-3fd33763e869","Type":"ContainerDied","Data":"15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96"} Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.808520 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"99600d96-195c-4b05-b7e4-3fd33763e869","Type":"ContainerDied","Data":"e47198c859e05da0ce8d27fbf4c7675d4358d40541b28dc2450c051dc4ddbd52"} Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.808517 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.808537 4799 scope.go:117] "RemoveContainer" containerID="e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.813443 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"dab8cf8d-7956-4cfb-a107-0e15661fc5f7","Type":"ContainerStarted","Data":"1c0269950058d0d2f449250e1c437b02a63a7a2957e9a29c58c24c644358f9d6"} Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.813654 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.846693 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.470775083 podStartE2EDuration="2.846670784s" podCreationTimestamp="2026-01-21 17:54:43 +0000 UTC" firstStartedPulling="2026-01-21 17:54:44.737942083 +0000 UTC m=+1311.364232096" lastFinishedPulling="2026-01-21 17:54:45.113837774 +0000 UTC m=+1311.740127797" observedRunningTime="2026-01-21 17:54:45.841633013 +0000 UTC m=+1312.467923036" watchObservedRunningTime="2026-01-21 17:54:45.846670784 +0000 UTC m=+1312.472960807" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.882492 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5t5p2\" (UniqueName: \"kubernetes.io/projected/99600d96-195c-4b05-b7e4-3fd33763e869-kube-api-access-5t5p2\") pod \"99600d96-195c-4b05-b7e4-3fd33763e869\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.883159 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99600d96-195c-4b05-b7e4-3fd33763e869-logs\") pod \"99600d96-195c-4b05-b7e4-3fd33763e869\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.883218 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-nova-metadata-tls-certs\") pod \"99600d96-195c-4b05-b7e4-3fd33763e869\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.883265 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-combined-ca-bundle\") pod \"99600d96-195c-4b05-b7e4-3fd33763e869\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.883327 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-config-data\") pod \"99600d96-195c-4b05-b7e4-3fd33763e869\" (UID: \"99600d96-195c-4b05-b7e4-3fd33763e869\") " Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.887268 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99600d96-195c-4b05-b7e4-3fd33763e869-logs" (OuterVolumeSpecName: "logs") pod "99600d96-195c-4b05-b7e4-3fd33763e869" (UID: "99600d96-195c-4b05-b7e4-3fd33763e869"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.888243 4799 scope.go:117] "RemoveContainer" containerID="15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.888925 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99600d96-195c-4b05-b7e4-3fd33763e869-kube-api-access-5t5p2" (OuterVolumeSpecName: "kube-api-access-5t5p2") pod "99600d96-195c-4b05-b7e4-3fd33763e869" (UID: "99600d96-195c-4b05-b7e4-3fd33763e869"). InnerVolumeSpecName "kube-api-access-5t5p2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.915110 4799 scope.go:117] "RemoveContainer" containerID="e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6" Jan 21 17:54:45 crc kubenswrapper[4799]: E0121 17:54:45.916717 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6\": container with ID starting with e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6 not found: ID does not exist" containerID="e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.916772 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6"} err="failed to get container status \"e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6\": rpc error: code = NotFound desc = could not find container \"e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6\": container with ID starting with e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6 not found: ID does not exist" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.916804 4799 scope.go:117] "RemoveContainer" containerID="15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96" Jan 21 17:54:45 crc kubenswrapper[4799]: E0121 17:54:45.918061 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96\": container with ID starting with 15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96 not found: ID does not exist" containerID="15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.918111 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96"} err="failed to get container status \"15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96\": rpc error: code = NotFound desc = could not find container \"15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96\": container with ID starting with 15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96 not found: ID does not exist" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.918153 4799 scope.go:117] "RemoveContainer" containerID="e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.918671 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6"} err="failed to get container status \"e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6\": rpc error: code = NotFound desc = could not find container \"e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6\": container with ID starting with e04dcd9a3688a906425eed8fcf735fd587ae16527c69b2ba99e8557c708fb1d6 not found: ID does not exist" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.918695 4799 scope.go:117] "RemoveContainer" containerID="15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.918944 4799 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96"} err="failed to get container status \"15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96\": rpc error: code = NotFound desc = could not find container \"15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96\": container with ID starting with 15e5166c90bf38e09fb0e193416e211b7badfac8d1e45d8a5a830b9ca94b0c96 not found: ID does not exist" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.921672 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-config-data" (OuterVolumeSpecName: "config-data") pod "99600d96-195c-4b05-b7e4-3fd33763e869" (UID: "99600d96-195c-4b05-b7e4-3fd33763e869"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.923852 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "99600d96-195c-4b05-b7e4-3fd33763e869" (UID: "99600d96-195c-4b05-b7e4-3fd33763e869"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.948184 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "99600d96-195c-4b05-b7e4-3fd33763e869" (UID: "99600d96-195c-4b05-b7e4-3fd33763e869"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.986306 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5t5p2\" (UniqueName: \"kubernetes.io/projected/99600d96-195c-4b05-b7e4-3fd33763e869-kube-api-access-5t5p2\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.986342 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99600d96-195c-4b05-b7e4-3fd33763e869-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.986353 4799 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.986362 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:45 crc kubenswrapper[4799]: I0121 17:54:45.986370 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99600d96-195c-4b05-b7e4-3fd33763e869-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.152756 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.174834 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.188742 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 21 17:54:46 crc kubenswrapper[4799]: E0121 17:54:46.189289 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbc0e1b8-d099-4a3b-b501-b8486d893927" containerName="nova-manage" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.189314 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbc0e1b8-d099-4a3b-b501-b8486d893927" containerName="nova-manage" Jan 21 17:54:46 crc kubenswrapper[4799]: E0121 17:54:46.189346 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99600d96-195c-4b05-b7e4-3fd33763e869" containerName="nova-metadata-metadata" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.189355 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="99600d96-195c-4b05-b7e4-3fd33763e869" containerName="nova-metadata-metadata" Jan 21 17:54:46 crc kubenswrapper[4799]: E0121 17:54:46.189379 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99600d96-195c-4b05-b7e4-3fd33763e869" containerName="nova-metadata-log" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.189389 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="99600d96-195c-4b05-b7e4-3fd33763e869" containerName="nova-metadata-log" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.189651 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbc0e1b8-d099-4a3b-b501-b8486d893927" containerName="nova-manage" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.189681 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="99600d96-195c-4b05-b7e4-3fd33763e869" containerName="nova-metadata-metadata" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.189713 4799 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="99600d96-195c-4b05-b7e4-3fd33763e869" containerName="nova-metadata-log" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.190908 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.194097 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.194790 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.234852 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99600d96-195c-4b05-b7e4-3fd33763e869" path="/var/lib/kubelet/pods/99600d96-195c-4b05-b7e4-3fd33763e869/volumes" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.236371 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.292211 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/423606aa-ec3f-4223-a607-b88f5c132e91-logs\") pod \"nova-metadata-0\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") " pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.292326 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") " pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.292361 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-config-data\") pod \"nova-metadata-0\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") " pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.292439 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmtz9\" (UniqueName: \"kubernetes.io/projected/423606aa-ec3f-4223-a607-b88f5c132e91-kube-api-access-kmtz9\") pod \"nova-metadata-0\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") " pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.292532 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") " pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.393741 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") " pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.393942 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/423606aa-ec3f-4223-a607-b88f5c132e91-logs\") pod \"nova-metadata-0\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") " pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.393989 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") " pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.394024 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-config-data\") pod \"nova-metadata-0\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") " pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.394051 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmtz9\" (UniqueName: \"kubernetes.io/projected/423606aa-ec3f-4223-a607-b88f5c132e91-kube-api-access-kmtz9\") pod \"nova-metadata-0\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") " pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.394474 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/423606aa-ec3f-4223-a607-b88f5c132e91-logs\") pod \"nova-metadata-0\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") " pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.400510 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") " pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.400532 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-config-data\") pod \"nova-metadata-0\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") " pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.400626 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") " pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.415809 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmtz9\" (UniqueName: \"kubernetes.io/projected/423606aa-ec3f-4223-a607-b88f5c132e91-kube-api-access-kmtz9\") pod \"nova-metadata-0\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") " pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.517628 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.840900 4799 generic.go:334] "Generic (PLEG): container finished" podID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerID="995a0eae055fbdc6641b75a43b7ed72ce31134fc842d6f302138e2cb6e355b25" exitCode=0 Jan 21 17:54:46 crc kubenswrapper[4799]: I0121 17:54:46.842047 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d71d15bb-8612-40d5-b21a-5fe51f6c95d5","Type":"ContainerDied","Data":"995a0eae055fbdc6641b75a43b7ed72ce31134fc842d6f302138e2cb6e355b25"} Jan 21 17:54:47 crc kubenswrapper[4799]: I0121 17:54:47.051576 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 21 17:54:47 crc kubenswrapper[4799]: I0121 17:54:47.852901 4799 generic.go:334] "Generic (PLEG): container finished" podID="fecfb45f-5926-41d7-b7c8-317a1a077eaf" containerID="d0e5986d3cb474c2f382f374d9b4eb4d38cb9a20691efe800702ae5913597568" exitCode=0 Jan 21 17:54:47 crc kubenswrapper[4799]: I0121 17:54:47.852976 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-2zbcj" event={"ID":"fecfb45f-5926-41d7-b7c8-317a1a077eaf","Type":"ContainerDied","Data":"d0e5986d3cb474c2f382f374d9b4eb4d38cb9a20691efe800702ae5913597568"} Jan 21 17:54:47 crc kubenswrapper[4799]: I0121 17:54:47.855544 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"423606aa-ec3f-4223-a607-b88f5c132e91","Type":"ContainerStarted","Data":"21d712afc7e7f095d223324a51720ecff07a038f5c5035d421357a77be5de346"} Jan 21 17:54:47 crc kubenswrapper[4799]: I0121 17:54:47.855572 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"423606aa-ec3f-4223-a607-b88f5c132e91","Type":"ContainerStarted","Data":"29abfd725d357b561e6ed4a0cf9fef9ec7f9ba0f12e6e33a57293256762e2381"} Jan 21 17:54:47 crc kubenswrapper[4799]: I0121 17:54:47.855584 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"423606aa-ec3f-4223-a607-b88f5c132e91","Type":"ContainerStarted","Data":"00e8386cb70c01f86d9592c27a2af76060b7b7e1fc72bede6828b48bd4c00fb7"} Jan 21 17:54:47 crc kubenswrapper[4799]: I0121 17:54:47.890079 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.890056205 podStartE2EDuration="1.890056205s" podCreationTimestamp="2026-01-21 17:54:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:54:47.882346519 +0000 UTC m=+1314.508636562" watchObservedRunningTime="2026-01-21 17:54:47.890056205 +0000 UTC m=+1314.516346228" Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.197654 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.236434 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-config-data\") pod \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\" (UID: \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\") " Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.236574 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-combined-ca-bundle\") pod \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\" (UID: \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\") " Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.236716 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9nmfv\" (UniqueName: \"kubernetes.io/projected/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-kube-api-access-9nmfv\") pod \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\" (UID: \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\") " Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.236843 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-logs\") pod \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\" (UID: \"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7\") " Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.238396 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-logs" (OuterVolumeSpecName: "logs") pod "bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" (UID: "bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.241439 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-kube-api-access-9nmfv" (OuterVolumeSpecName: "kube-api-access-9nmfv") pod "bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" (UID: "bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7"). InnerVolumeSpecName "kube-api-access-9nmfv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.269618 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" (UID: "bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.272336 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-config-data" (OuterVolumeSpecName: "config-data") pod "bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" (UID: "bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.339465 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.339502 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.339518 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9nmfv\" (UniqueName: \"kubernetes.io/projected/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-kube-api-access-9nmfv\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.339528 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.868812 4799 generic.go:334] "Generic (PLEG): container finished" podID="bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" containerID="6347cdde9c8946f21784132a3edbfe07824260e19222d691498038bbf1ef839f" exitCode=0 Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.869180 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7","Type":"ContainerDied","Data":"6347cdde9c8946f21784132a3edbfe07824260e19222d691498038bbf1ef839f"} Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.869242 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7","Type":"ContainerDied","Data":"4aa045fdd0cd0ac8560d178c9f95c69ee4f785353616086b4928a0d5e3521675"} Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.869260 4799 scope.go:117] "RemoveContainer" containerID="6347cdde9c8946f21784132a3edbfe07824260e19222d691498038bbf1ef839f" Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.869443 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.964489 4799 scope.go:117] "RemoveContainer" containerID="8753d0ab3858b42e85079b4b8baf2224acfa8af0c1b7d3dd1d7be2a23ea2d9a3" Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.976242 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 21 17:54:48 crc kubenswrapper[4799]: I0121 17:54:48.988335 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.006380 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 21 17:54:49 crc kubenswrapper[4799]: E0121 17:54:49.006836 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" containerName="nova-api-log" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.006851 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" containerName="nova-api-log" Jan 21 17:54:49 crc kubenswrapper[4799]: E0121 17:54:49.006884 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" containerName="nova-api-api" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.006894 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" containerName="nova-api-api" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.007096 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" containerName="nova-api-log" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.007118 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" containerName="nova-api-api" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.008723 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.011407 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.029557 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.039753 4799 scope.go:117] "RemoveContainer" containerID="6347cdde9c8946f21784132a3edbfe07824260e19222d691498038bbf1ef839f" Jan 21 17:54:49 crc kubenswrapper[4799]: E0121 17:54:49.040821 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6347cdde9c8946f21784132a3edbfe07824260e19222d691498038bbf1ef839f\": container with ID starting with 6347cdde9c8946f21784132a3edbfe07824260e19222d691498038bbf1ef839f not found: ID does not exist" containerID="6347cdde9c8946f21784132a3edbfe07824260e19222d691498038bbf1ef839f" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.040871 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6347cdde9c8946f21784132a3edbfe07824260e19222d691498038bbf1ef839f"} err="failed to get container status \"6347cdde9c8946f21784132a3edbfe07824260e19222d691498038bbf1ef839f\": rpc error: code = NotFound desc = could not find container \"6347cdde9c8946f21784132a3edbfe07824260e19222d691498038bbf1ef839f\": container with ID starting with 6347cdde9c8946f21784132a3edbfe07824260e19222d691498038bbf1ef839f not found: ID does not exist" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.040900 4799 scope.go:117] "RemoveContainer" containerID="8753d0ab3858b42e85079b4b8baf2224acfa8af0c1b7d3dd1d7be2a23ea2d9a3" Jan 21 17:54:49 crc kubenswrapper[4799]: E0121 17:54:49.041401 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8753d0ab3858b42e85079b4b8baf2224acfa8af0c1b7d3dd1d7be2a23ea2d9a3\": container with ID starting with 8753d0ab3858b42e85079b4b8baf2224acfa8af0c1b7d3dd1d7be2a23ea2d9a3 not found: ID does not exist" containerID="8753d0ab3858b42e85079b4b8baf2224acfa8af0c1b7d3dd1d7be2a23ea2d9a3" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.041447 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8753d0ab3858b42e85079b4b8baf2224acfa8af0c1b7d3dd1d7be2a23ea2d9a3"} err="failed to get container status \"8753d0ab3858b42e85079b4b8baf2224acfa8af0c1b7d3dd1d7be2a23ea2d9a3\": rpc error: code = NotFound desc = could not find container \"8753d0ab3858b42e85079b4b8baf2224acfa8af0c1b7d3dd1d7be2a23ea2d9a3\": container with ID starting with 8753d0ab3858b42e85079b4b8baf2224acfa8af0c1b7d3dd1d7be2a23ea2d9a3 not found: ID does not exist" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.051539 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-config-data\") pod \"nova-api-0\" (UID: \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\") " pod="openstack/nova-api-0" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.051915 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-logs\") pod \"nova-api-0\" (UID: \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\") " pod="openstack/nova-api-0" 
Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.051970 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-828pq\" (UniqueName: \"kubernetes.io/projected/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-kube-api-access-828pq\") pod \"nova-api-0\" (UID: \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\") " pod="openstack/nova-api-0" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.052044 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\") " pod="openstack/nova-api-0" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.153908 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-config-data\") pod \"nova-api-0\" (UID: \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\") " pod="openstack/nova-api-0" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.154012 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-logs\") pod \"nova-api-0\" (UID: \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\") " pod="openstack/nova-api-0" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.154056 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-828pq\" (UniqueName: \"kubernetes.io/projected/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-kube-api-access-828pq\") pod \"nova-api-0\" (UID: \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\") " pod="openstack/nova-api-0" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.154090 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\") " pod="openstack/nova-api-0" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.154663 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-logs\") pod \"nova-api-0\" (UID: \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\") " pod="openstack/nova-api-0" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.164015 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\") " pod="openstack/nova-api-0" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.170095 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-config-data\") pod \"nova-api-0\" (UID: \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\") " pod="openstack/nova-api-0" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.176113 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-828pq\" (UniqueName: \"kubernetes.io/projected/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-kube-api-access-828pq\") pod \"nova-api-0\" (UID: \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\") " pod="openstack/nova-api-0" Jan 21 17:54:49 crc 
kubenswrapper[4799]: I0121 17:54:49.320842 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-2zbcj" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.354739 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.469706 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-scripts\") pod \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\" (UID: \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\") " Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.470075 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-combined-ca-bundle\") pod \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\" (UID: \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\") " Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.470111 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nkbr\" (UniqueName: \"kubernetes.io/projected/fecfb45f-5926-41d7-b7c8-317a1a077eaf-kube-api-access-5nkbr\") pod \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\" (UID: \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\") " Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.471530 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-config-data\") pod \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\" (UID: \"fecfb45f-5926-41d7-b7c8-317a1a077eaf\") " Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.475077 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fecfb45f-5926-41d7-b7c8-317a1a077eaf-kube-api-access-5nkbr" (OuterVolumeSpecName: "kube-api-access-5nkbr") pod "fecfb45f-5926-41d7-b7c8-317a1a077eaf" (UID: "fecfb45f-5926-41d7-b7c8-317a1a077eaf"). InnerVolumeSpecName "kube-api-access-5nkbr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.475341 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-scripts" (OuterVolumeSpecName: "scripts") pod "fecfb45f-5926-41d7-b7c8-317a1a077eaf" (UID: "fecfb45f-5926-41d7-b7c8-317a1a077eaf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.517876 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fecfb45f-5926-41d7-b7c8-317a1a077eaf" (UID: "fecfb45f-5926-41d7-b7c8-317a1a077eaf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.519118 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-config-data" (OuterVolumeSpecName: "config-data") pod "fecfb45f-5926-41d7-b7c8-317a1a077eaf" (UID: "fecfb45f-5926-41d7-b7c8-317a1a077eaf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.534479 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.572513 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-combined-ca-bundle\") pod \"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42\" (UID: \"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42\") " Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.572690 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-config-data\") pod \"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42\" (UID: \"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42\") " Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.572736 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hfbnc\" (UniqueName: \"kubernetes.io/projected/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-kube-api-access-hfbnc\") pod \"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42\" (UID: \"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42\") " Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.573103 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.573149 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.573163 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fecfb45f-5926-41d7-b7c8-317a1a077eaf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.573176 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nkbr\" (UniqueName: \"kubernetes.io/projected/fecfb45f-5926-41d7-b7c8-317a1a077eaf-kube-api-access-5nkbr\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.580549 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-kube-api-access-hfbnc" (OuterVolumeSpecName: "kube-api-access-hfbnc") pod "1ed97e8f-9f52-4b26-8f21-b475f4f4ca42" (UID: "1ed97e8f-9f52-4b26-8f21-b475f4f4ca42"). InnerVolumeSpecName "kube-api-access-hfbnc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.608113 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1ed97e8f-9f52-4b26-8f21-b475f4f4ca42" (UID: "1ed97e8f-9f52-4b26-8f21-b475f4f4ca42"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.610294 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-config-data" (OuterVolumeSpecName: "config-data") pod "1ed97e8f-9f52-4b26-8f21-b475f4f4ca42" (UID: "1ed97e8f-9f52-4b26-8f21-b475f4f4ca42"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.675284 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.675332 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.675345 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hfbnc\" (UniqueName: \"kubernetes.io/projected/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42-kube-api-access-hfbnc\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.835876 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 21 17:54:49 crc kubenswrapper[4799]: W0121 17:54:49.842074 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73f7fb3c_0d3f_4e84_8def_7a0e2bbb9357.slice/crio-bcafccd5b5a62f040428a51ffff1b372ec3fc39fc6ebe7b145c0178628ceced1 WatchSource:0}: Error finding container bcafccd5b5a62f040428a51ffff1b372ec3fc39fc6ebe7b145c0178628ceced1: Status 404 returned error can't find the container with id bcafccd5b5a62f040428a51ffff1b372ec3fc39fc6ebe7b145c0178628ceced1 Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.882605 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357","Type":"ContainerStarted","Data":"bcafccd5b5a62f040428a51ffff1b372ec3fc39fc6ebe7b145c0178628ceced1"} Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.885606 4799 generic.go:334] "Generic (PLEG): container finished" podID="1ed97e8f-9f52-4b26-8f21-b475f4f4ca42" containerID="ddf9e459e108f38042f0e6ad5edd9f3266b75357b97daa53ab9d90a5bd54a08d" exitCode=0 Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.885636 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.885665 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42","Type":"ContainerDied","Data":"ddf9e459e108f38042f0e6ad5edd9f3266b75357b97daa53ab9d90a5bd54a08d"} Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.885700 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1ed97e8f-9f52-4b26-8f21-b475f4f4ca42","Type":"ContainerDied","Data":"2be1f33b22b779fee7812b90945d8d56f2a2935ed30f5b8e5fca0753139d5cec"} Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.885721 4799 scope.go:117] "RemoveContainer" containerID="ddf9e459e108f38042f0e6ad5edd9f3266b75357b97daa53ab9d90a5bd54a08d" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.890860 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-2zbcj" event={"ID":"fecfb45f-5926-41d7-b7c8-317a1a077eaf","Type":"ContainerDied","Data":"d99fb0e90cbfcf9e7205bb0c49cadf667fce8af001b79505003e050e5dd491b5"} Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.890903 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d99fb0e90cbfcf9e7205bb0c49cadf667fce8af001b79505003e050e5dd491b5" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.890966 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-2zbcj" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.930516 4799 scope.go:117] "RemoveContainer" containerID="ddf9e459e108f38042f0e6ad5edd9f3266b75357b97daa53ab9d90a5bd54a08d" Jan 21 17:54:49 crc kubenswrapper[4799]: E0121 17:54:49.945555 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ddf9e459e108f38042f0e6ad5edd9f3266b75357b97daa53ab9d90a5bd54a08d\": container with ID starting with ddf9e459e108f38042f0e6ad5edd9f3266b75357b97daa53ab9d90a5bd54a08d not found: ID does not exist" containerID="ddf9e459e108f38042f0e6ad5edd9f3266b75357b97daa53ab9d90a5bd54a08d" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.945608 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddf9e459e108f38042f0e6ad5edd9f3266b75357b97daa53ab9d90a5bd54a08d"} err="failed to get container status \"ddf9e459e108f38042f0e6ad5edd9f3266b75357b97daa53ab9d90a5bd54a08d\": rpc error: code = NotFound desc = could not find container \"ddf9e459e108f38042f0e6ad5edd9f3266b75357b97daa53ab9d90a5bd54a08d\": container with ID starting with ddf9e459e108f38042f0e6ad5edd9f3266b75357b97daa53ab9d90a5bd54a08d not found: ID does not exist" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.951803 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.968386 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.984357 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 21 17:54:49 crc kubenswrapper[4799]: E0121 17:54:49.984952 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fecfb45f-5926-41d7-b7c8-317a1a077eaf" containerName="nova-cell1-conductor-db-sync" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.984966 4799 
state_mem.go:107] "Deleted CPUSet assignment" podUID="fecfb45f-5926-41d7-b7c8-317a1a077eaf" containerName="nova-cell1-conductor-db-sync" Jan 21 17:54:49 crc kubenswrapper[4799]: E0121 17:54:49.984985 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ed97e8f-9f52-4b26-8f21-b475f4f4ca42" containerName="nova-scheduler-scheduler" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.984991 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ed97e8f-9f52-4b26-8f21-b475f4f4ca42" containerName="nova-scheduler-scheduler" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.985197 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="fecfb45f-5926-41d7-b7c8-317a1a077eaf" containerName="nova-cell1-conductor-db-sync" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.985211 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ed97e8f-9f52-4b26-8f21-b475f4f4ca42" containerName="nova-scheduler-scheduler" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.986085 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 21 17:54:49 crc kubenswrapper[4799]: I0121 17:54:49.991869 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:49.999956 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.003279 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.006783 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.014473 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.035355 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.215116 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbgrm\" (UniqueName: \"kubernetes.io/projected/ff6ad04e-2495-4af0-b908-ee65bb277ebc-kube-api-access-pbgrm\") pod \"nova-cell1-conductor-0\" (UID: \"ff6ad04e-2495-4af0-b908-ee65bb277ebc\") " pod="openstack/nova-cell1-conductor-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.215289 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cz27f\" (UniqueName: \"kubernetes.io/projected/d6cb4b02-7469-4b56-9bc8-ae205587439c-kube-api-access-cz27f\") pod \"nova-scheduler-0\" (UID: \"d6cb4b02-7469-4b56-9bc8-ae205587439c\") " pod="openstack/nova-scheduler-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.215331 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6cb4b02-7469-4b56-9bc8-ae205587439c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d6cb4b02-7469-4b56-9bc8-ae205587439c\") " pod="openstack/nova-scheduler-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.215361 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/ff6ad04e-2495-4af0-b908-ee65bb277ebc-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ff6ad04e-2495-4af0-b908-ee65bb277ebc\") " pod="openstack/nova-cell1-conductor-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.215395 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6cb4b02-7469-4b56-9bc8-ae205587439c-config-data\") pod \"nova-scheduler-0\" (UID: \"d6cb4b02-7469-4b56-9bc8-ae205587439c\") " pod="openstack/nova-scheduler-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.215430 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff6ad04e-2495-4af0-b908-ee65bb277ebc-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ff6ad04e-2495-4af0-b908-ee65bb277ebc\") " pod="openstack/nova-cell1-conductor-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.223185 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ed97e8f-9f52-4b26-8f21-b475f4f4ca42" path="/var/lib/kubelet/pods/1ed97e8f-9f52-4b26-8f21-b475f4f4ca42/volumes" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.224495 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7" path="/var/lib/kubelet/pods/bbef4bbc-9610-4354-a9d8-ffc3f7d1bac7/volumes" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.317776 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbgrm\" (UniqueName: \"kubernetes.io/projected/ff6ad04e-2495-4af0-b908-ee65bb277ebc-kube-api-access-pbgrm\") pod \"nova-cell1-conductor-0\" (UID: \"ff6ad04e-2495-4af0-b908-ee65bb277ebc\") " pod="openstack/nova-cell1-conductor-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.318570 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cz27f\" (UniqueName: \"kubernetes.io/projected/d6cb4b02-7469-4b56-9bc8-ae205587439c-kube-api-access-cz27f\") pod \"nova-scheduler-0\" (UID: \"d6cb4b02-7469-4b56-9bc8-ae205587439c\") " pod="openstack/nova-scheduler-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.318632 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6cb4b02-7469-4b56-9bc8-ae205587439c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d6cb4b02-7469-4b56-9bc8-ae205587439c\") " pod="openstack/nova-scheduler-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.318662 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff6ad04e-2495-4af0-b908-ee65bb277ebc-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ff6ad04e-2495-4af0-b908-ee65bb277ebc\") " pod="openstack/nova-cell1-conductor-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.318706 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6cb4b02-7469-4b56-9bc8-ae205587439c-config-data\") pod \"nova-scheduler-0\" (UID: \"d6cb4b02-7469-4b56-9bc8-ae205587439c\") " pod="openstack/nova-scheduler-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.318744 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ff6ad04e-2495-4af0-b908-ee65bb277ebc-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ff6ad04e-2495-4af0-b908-ee65bb277ebc\") " pod="openstack/nova-cell1-conductor-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.326874 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff6ad04e-2495-4af0-b908-ee65bb277ebc-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ff6ad04e-2495-4af0-b908-ee65bb277ebc\") " pod="openstack/nova-cell1-conductor-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.326879 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6cb4b02-7469-4b56-9bc8-ae205587439c-config-data\") pod \"nova-scheduler-0\" (UID: \"d6cb4b02-7469-4b56-9bc8-ae205587439c\") " pod="openstack/nova-scheduler-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.327597 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6cb4b02-7469-4b56-9bc8-ae205587439c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d6cb4b02-7469-4b56-9bc8-ae205587439c\") " pod="openstack/nova-scheduler-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.328315 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff6ad04e-2495-4af0-b908-ee65bb277ebc-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ff6ad04e-2495-4af0-b908-ee65bb277ebc\") " pod="openstack/nova-cell1-conductor-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.341206 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cz27f\" (UniqueName: \"kubernetes.io/projected/d6cb4b02-7469-4b56-9bc8-ae205587439c-kube-api-access-cz27f\") pod \"nova-scheduler-0\" (UID: \"d6cb4b02-7469-4b56-9bc8-ae205587439c\") " pod="openstack/nova-scheduler-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.347630 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbgrm\" (UniqueName: \"kubernetes.io/projected/ff6ad04e-2495-4af0-b908-ee65bb277ebc-kube-api-access-pbgrm\") pod \"nova-cell1-conductor-0\" (UID: \"ff6ad04e-2495-4af0-b908-ee65bb277ebc\") " pod="openstack/nova-cell1-conductor-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.624685 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.635709 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.949751 4799 generic.go:334] "Generic (PLEG): container finished" podID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerID="6453eb4470b30ca2611cd25fdb7a87d5dc976e5d68a2e62bc39ef8a4d8b2b4ab" exitCode=0 Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.949823 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d71d15bb-8612-40d5-b21a-5fe51f6c95d5","Type":"ContainerDied","Data":"6453eb4470b30ca2611cd25fdb7a87d5dc976e5d68a2e62bc39ef8a4d8b2b4ab"} Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.953098 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357","Type":"ContainerStarted","Data":"bfc8d191eea35285a31160c94103323c9bd2d3e4784874a3d7e30c1885da4c2b"} Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.953962 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357","Type":"ContainerStarted","Data":"e3e305e4e777365b0da2214aee0ad6ed3731fb5d17755827239aacc2c2662e6b"} Jan 21 17:54:50 crc kubenswrapper[4799]: I0121 17:54:50.973710 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.973685716 podStartE2EDuration="2.973685716s" podCreationTimestamp="2026-01-21 17:54:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:54:50.971594288 +0000 UTC m=+1317.597884311" watchObservedRunningTime="2026-01-21 17:54:50.973685716 +0000 UTC m=+1317.599975739" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.132599 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 21 17:54:51 crc kubenswrapper[4799]: W0121 17:54:51.148525 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff6ad04e_2495_4af0_b908_ee65bb277ebc.slice/crio-419c7c319b9e1e5a3b4d050418e70a967ce184bb320194cae7b4144d469a8822 WatchSource:0}: Error finding container 419c7c319b9e1e5a3b4d050418e70a967ce184bb320194cae7b4144d469a8822: Status 404 returned error can't find the container with id 419c7c319b9e1e5a3b4d050418e70a967ce184bb320194cae7b4144d469a8822 Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.267302 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.341918 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.439972 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-run-httpd\") pod \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.440572 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d71d15bb-8612-40d5-b21a-5fe51f6c95d5" (UID: "d71d15bb-8612-40d5-b21a-5fe51f6c95d5"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.440745 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-scripts\") pod \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.440868 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-log-httpd\") pod \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.441226 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d71d15bb-8612-40d5-b21a-5fe51f6c95d5" (UID: "d71d15bb-8612-40d5-b21a-5fe51f6c95d5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.441256 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-sg-core-conf-yaml\") pod \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.441352 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-combined-ca-bundle\") pod \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.441678 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-config-data\") pod \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.441708 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rmr2\" (UniqueName: \"kubernetes.io/projected/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-kube-api-access-2rmr2\") pod \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\" (UID: \"d71d15bb-8612-40d5-b21a-5fe51f6c95d5\") " Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.442360 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.442378 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.444382 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-scripts" (OuterVolumeSpecName: "scripts") pod "d71d15bb-8612-40d5-b21a-5fe51f6c95d5" (UID: "d71d15bb-8612-40d5-b21a-5fe51f6c95d5"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.445993 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-kube-api-access-2rmr2" (OuterVolumeSpecName: "kube-api-access-2rmr2") pod "d71d15bb-8612-40d5-b21a-5fe51f6c95d5" (UID: "d71d15bb-8612-40d5-b21a-5fe51f6c95d5"). InnerVolumeSpecName "kube-api-access-2rmr2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.468846 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d71d15bb-8612-40d5-b21a-5fe51f6c95d5" (UID: "d71d15bb-8612-40d5-b21a-5fe51f6c95d5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.514551 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d71d15bb-8612-40d5-b21a-5fe51f6c95d5" (UID: "d71d15bb-8612-40d5-b21a-5fe51f6c95d5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.518767 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.518836 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.543639 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rmr2\" (UniqueName: \"kubernetes.io/projected/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-kube-api-access-2rmr2\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.543677 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.543687 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.543695 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.555102 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-config-data" (OuterVolumeSpecName: "config-data") pod "d71d15bb-8612-40d5-b21a-5fe51f6c95d5" (UID: "d71d15bb-8612-40d5-b21a-5fe51f6c95d5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.645626 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d71d15bb-8612-40d5-b21a-5fe51f6c95d5-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.971264 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ff6ad04e-2495-4af0-b908-ee65bb277ebc","Type":"ContainerStarted","Data":"0c969700f266d52def781d351b09b25b40859114a688631a997cb9cd1d9f1102"} Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.971393 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ff6ad04e-2495-4af0-b908-ee65bb277ebc","Type":"ContainerStarted","Data":"419c7c319b9e1e5a3b4d050418e70a967ce184bb320194cae7b4144d469a8822"} Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.972152 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.976985 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d71d15bb-8612-40d5-b21a-5fe51f6c95d5","Type":"ContainerDied","Data":"34ae1d6a4490e166982a7e0939a2fadfac41886a6e7117297adde17d82ce8b7c"} Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.977037 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.978199 4799 scope.go:117] "RemoveContainer" containerID="9830c615ed08576c95a23a692cd33245b4618cf25aee13d57428205ece62c9da" Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.984302 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d6cb4b02-7469-4b56-9bc8-ae205587439c","Type":"ContainerStarted","Data":"068bf8bb281f75e4be692955ecab11348760b6f46ff626802e22f1a182ec48c5"} Jan 21 17:54:51 crc kubenswrapper[4799]: I0121 17:54:51.984385 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d6cb4b02-7469-4b56-9bc8-ae205587439c","Type":"ContainerStarted","Data":"401fa791bf8bfa3dab2c912b13713602df79f36e2fbc6d093f572fd96d1759d0"} Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.005975 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=3.005948942 podStartE2EDuration="3.005948942s" podCreationTimestamp="2026-01-21 17:54:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:54:51.996377524 +0000 UTC m=+1318.622667547" watchObservedRunningTime="2026-01-21 17:54:52.005948942 +0000 UTC m=+1318.632238965" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.012791 4799 scope.go:117] "RemoveContainer" containerID="b2eb0c516a58245631231bc722aeeeda844bf91add33e0cdb51e488991bd4597" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.028085 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.045106 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.046028 4799 scope.go:117] "RemoveContainer" 
containerID="6453eb4470b30ca2611cd25fdb7a87d5dc976e5d68a2e62bc39ef8a4d8b2b4ab" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.051920 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.051897171 podStartE2EDuration="3.051897171s" podCreationTimestamp="2026-01-21 17:54:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:54:52.035720997 +0000 UTC m=+1318.662011020" watchObservedRunningTime="2026-01-21 17:54:52.051897171 +0000 UTC m=+1318.678187194" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.073689 4799 scope.go:117] "RemoveContainer" containerID="995a0eae055fbdc6641b75a43b7ed72ce31134fc842d6f302138e2cb6e355b25" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.078482 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:54:52 crc kubenswrapper[4799]: E0121 17:54:52.078987 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerName="ceilometer-central-agent" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.079002 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerName="ceilometer-central-agent" Jan 21 17:54:52 crc kubenswrapper[4799]: E0121 17:54:52.079013 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerName="proxy-httpd" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.079020 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerName="proxy-httpd" Jan 21 17:54:52 crc kubenswrapper[4799]: E0121 17:54:52.079043 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerName="ceilometer-notification-agent" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.079049 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerName="ceilometer-notification-agent" Jan 21 17:54:52 crc kubenswrapper[4799]: E0121 17:54:52.079064 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerName="sg-core" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.079071 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerName="sg-core" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.079332 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerName="ceilometer-central-agent" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.079353 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerName="proxy-httpd" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.079384 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerName="sg-core" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.079406 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" containerName="ceilometer-notification-agent" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.082960 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.086406 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.086647 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.086885 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.089958 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.218965 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d71d15bb-8612-40d5-b21a-5fe51f6c95d5" path="/var/lib/kubelet/pods/d71d15bb-8612-40d5-b21a-5fe51f6c95d5/volumes" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.258521 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.258872 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-config-data\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.258999 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08e73474-1501-44e2-a1f3-d53c149c2ed8-log-httpd\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.259199 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08e73474-1501-44e2-a1f3-d53c149c2ed8-run-httpd\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.259258 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.259429 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.259524 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-scripts\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " 
pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.259549 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dqvh\" (UniqueName: \"kubernetes.io/projected/08e73474-1501-44e2-a1f3-d53c149c2ed8-kube-api-access-4dqvh\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.361462 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-config-data\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.361682 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08e73474-1501-44e2-a1f3-d53c149c2ed8-log-httpd\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.362595 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08e73474-1501-44e2-a1f3-d53c149c2ed8-log-httpd\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.363062 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08e73474-1501-44e2-a1f3-d53c149c2ed8-run-httpd\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.363921 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08e73474-1501-44e2-a1f3-d53c149c2ed8-run-httpd\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.364211 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.368095 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-config-data\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.372511 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.372834 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " 
pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.374286 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-scripts\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.374895 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dqvh\" (UniqueName: \"kubernetes.io/projected/08e73474-1501-44e2-a1f3-d53c149c2ed8-kube-api-access-4dqvh\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.375184 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.378790 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.380115 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-scripts\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.380542 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.405603 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dqvh\" (UniqueName: \"kubernetes.io/projected/08e73474-1501-44e2-a1f3-d53c149c2ed8-kube-api-access-4dqvh\") pod \"ceilometer-0\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.430471 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:54:52 crc kubenswrapper[4799]: I0121 17:54:52.932530 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:54:53 crc kubenswrapper[4799]: I0121 17:54:53.001561 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08e73474-1501-44e2-a1f3-d53c149c2ed8","Type":"ContainerStarted","Data":"9cbbc41ffad7d0be8e96a0ef69095f0396c2cd6e771eb77df7990de8fdc0256f"} Jan 21 17:54:54 crc kubenswrapper[4799]: I0121 17:54:54.015570 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08e73474-1501-44e2-a1f3-d53c149c2ed8","Type":"ContainerStarted","Data":"e1ee834cfcec2ccdec730b3681d1ea54e39b1a1ab5303f8721bfef6f8acce1f7"} Jan 21 17:54:54 crc kubenswrapper[4799]: I0121 17:54:54.015990 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08e73474-1501-44e2-a1f3-d53c149c2ed8","Type":"ContainerStarted","Data":"6738287bddbaf0a41b8381e824d218575ef877c74badb99845415c8786f2cd7c"} Jan 21 17:54:54 crc kubenswrapper[4799]: I0121 17:54:54.248835 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 21 17:54:55 crc kubenswrapper[4799]: I0121 17:54:55.027292 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08e73474-1501-44e2-a1f3-d53c149c2ed8","Type":"ContainerStarted","Data":"e589326dc82b2741fba075b192f75b10504ab93b8983b3297975219f2985660b"} Jan 21 17:54:55 crc kubenswrapper[4799]: I0121 17:54:55.624754 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 21 17:54:56 crc kubenswrapper[4799]: I0121 17:54:56.040172 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08e73474-1501-44e2-a1f3-d53c149c2ed8","Type":"ContainerStarted","Data":"1f868879eb3407085cc889ceabaddc4c5f09612e1757a2452abb1893e0fad878"} Jan 21 17:54:56 crc kubenswrapper[4799]: I0121 17:54:56.040501 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 21 17:54:56 crc kubenswrapper[4799]: I0121 17:54:56.068108 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.442502407 podStartE2EDuration="4.068076873s" podCreationTimestamp="2026-01-21 17:54:52 +0000 UTC" firstStartedPulling="2026-01-21 17:54:52.933512153 +0000 UTC m=+1319.559802186" lastFinishedPulling="2026-01-21 17:54:55.559086629 +0000 UTC m=+1322.185376652" observedRunningTime="2026-01-21 17:54:56.0633442 +0000 UTC m=+1322.689634243" watchObservedRunningTime="2026-01-21 17:54:56.068076873 +0000 UTC m=+1322.694366906" Jan 21 17:54:56 crc kubenswrapper[4799]: I0121 17:54:56.518434 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 21 17:54:56 crc kubenswrapper[4799]: I0121 17:54:56.518524 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 21 17:54:57 crc kubenswrapper[4799]: I0121 17:54:57.530360 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="423606aa-ec3f-4223-a607-b88f5c132e91" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 21 17:54:57 crc 
kubenswrapper[4799]: I0121 17:54:57.530361 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="423606aa-ec3f-4223-a607-b88f5c132e91" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 21 17:54:59 crc kubenswrapper[4799]: I0121 17:54:59.356154 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 21 17:54:59 crc kubenswrapper[4799]: I0121 17:54:59.356512 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 21 17:55:00 crc kubenswrapper[4799]: I0121 17:55:00.438391 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.212:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 21 17:55:00 crc kubenswrapper[4799]: I0121 17:55:00.438424 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.212:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 21 17:55:00 crc kubenswrapper[4799]: I0121 17:55:00.625472 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 21 17:55:00 crc kubenswrapper[4799]: I0121 17:55:00.683976 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Jan 21 17:55:00 crc kubenswrapper[4799]: I0121 17:55:00.684099 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 21 17:55:01 crc kubenswrapper[4799]: I0121 17:55:01.182108 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 21 17:55:06 crc kubenswrapper[4799]: I0121 17:55:06.526669 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 21 17:55:06 crc kubenswrapper[4799]: I0121 17:55:06.538162 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 21 17:55:06 crc kubenswrapper[4799]: I0121 17:55:06.542244 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 21 17:55:07 crc kubenswrapper[4799]: I0121 17:55:07.224707 4799 generic.go:334] "Generic (PLEG): container finished" podID="30d79297-b2b1-4eb9-9d7c-97069febc7df" containerID="66ee79073f4cc6dc8b1141b00917e5bcc81b3041d66a30099086991f7898436d" exitCode=137 Jan 21 17:55:07 crc kubenswrapper[4799]: I0121 17:55:07.224794 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"30d79297-b2b1-4eb9-9d7c-97069febc7df","Type":"ContainerDied","Data":"66ee79073f4cc6dc8b1141b00917e5bcc81b3041d66a30099086991f7898436d"} Jan 21 17:55:07 crc kubenswrapper[4799]: I0121 17:55:07.231905 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 21 17:55:07 crc kubenswrapper[4799]: I0121 17:55:07.411280 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:07 crc kubenswrapper[4799]: I0121 17:55:07.511044 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g89ll\" (UniqueName: \"kubernetes.io/projected/30d79297-b2b1-4eb9-9d7c-97069febc7df-kube-api-access-g89ll\") pod \"30d79297-b2b1-4eb9-9d7c-97069febc7df\" (UID: \"30d79297-b2b1-4eb9-9d7c-97069febc7df\") " Jan 21 17:55:07 crc kubenswrapper[4799]: I0121 17:55:07.511241 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30d79297-b2b1-4eb9-9d7c-97069febc7df-config-data\") pod \"30d79297-b2b1-4eb9-9d7c-97069febc7df\" (UID: \"30d79297-b2b1-4eb9-9d7c-97069febc7df\") " Jan 21 17:55:07 crc kubenswrapper[4799]: I0121 17:55:07.511988 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30d79297-b2b1-4eb9-9d7c-97069febc7df-combined-ca-bundle\") pod \"30d79297-b2b1-4eb9-9d7c-97069febc7df\" (UID: \"30d79297-b2b1-4eb9-9d7c-97069febc7df\") " Jan 21 17:55:07 crc kubenswrapper[4799]: I0121 17:55:07.516637 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30d79297-b2b1-4eb9-9d7c-97069febc7df-kube-api-access-g89ll" (OuterVolumeSpecName: "kube-api-access-g89ll") pod "30d79297-b2b1-4eb9-9d7c-97069febc7df" (UID: "30d79297-b2b1-4eb9-9d7c-97069febc7df"). InnerVolumeSpecName "kube-api-access-g89ll". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:55:07 crc kubenswrapper[4799]: I0121 17:55:07.543659 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30d79297-b2b1-4eb9-9d7c-97069febc7df-config-data" (OuterVolumeSpecName: "config-data") pod "30d79297-b2b1-4eb9-9d7c-97069febc7df" (UID: "30d79297-b2b1-4eb9-9d7c-97069febc7df"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:55:07 crc kubenswrapper[4799]: I0121 17:55:07.550230 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30d79297-b2b1-4eb9-9d7c-97069febc7df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30d79297-b2b1-4eb9-9d7c-97069febc7df" (UID: "30d79297-b2b1-4eb9-9d7c-97069febc7df"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:55:07 crc kubenswrapper[4799]: I0121 17:55:07.615077 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g89ll\" (UniqueName: \"kubernetes.io/projected/30d79297-b2b1-4eb9-9d7c-97069febc7df-kube-api-access-g89ll\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:07 crc kubenswrapper[4799]: I0121 17:55:07.615154 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30d79297-b2b1-4eb9-9d7c-97069febc7df-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:07 crc kubenswrapper[4799]: I0121 17:55:07.615173 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30d79297-b2b1-4eb9-9d7c-97069febc7df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.242793 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"30d79297-b2b1-4eb9-9d7c-97069febc7df","Type":"ContainerDied","Data":"9531b7836666087e0c91d5c354d8cb75e4a82ae603d574eb0d6a4079b812df9f"} Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.242919 4799 scope.go:117] "RemoveContainer" containerID="66ee79073f4cc6dc8b1141b00917e5bcc81b3041d66a30099086991f7898436d" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.242819 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.296095 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.318383 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.329394 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 21 17:55:08 crc kubenswrapper[4799]: E0121 17:55:08.330055 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30d79297-b2b1-4eb9-9d7c-97069febc7df" containerName="nova-cell1-novncproxy-novncproxy" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.330082 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="30d79297-b2b1-4eb9-9d7c-97069febc7df" containerName="nova-cell1-novncproxy-novncproxy" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.330344 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="30d79297-b2b1-4eb9-9d7c-97069febc7df" containerName="nova-cell1-novncproxy-novncproxy" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.331177 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.335536 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.335621 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.336920 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.340780 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.432581 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvdck\" (UniqueName: \"kubernetes.io/projected/1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9-kube-api-access-jvdck\") pod \"nova-cell1-novncproxy-0\" (UID: \"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.432758 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.432851 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.432906 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.432937 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.534963 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.535051 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 
17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.535120 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvdck\" (UniqueName: \"kubernetes.io/projected/1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9-kube-api-access-jvdck\") pod \"nova-cell1-novncproxy-0\" (UID: \"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.535240 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.535342 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.539666 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.539940 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.539938 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.540575 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.556829 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvdck\" (UniqueName: \"kubernetes.io/projected/1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9-kube-api-access-jvdck\") pod \"nova-cell1-novncproxy-0\" (UID: \"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9\") " pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:08 crc kubenswrapper[4799]: I0121 17:55:08.663828 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:09 crc kubenswrapper[4799]: I0121 17:55:09.157732 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 21 17:55:09 crc kubenswrapper[4799]: I0121 17:55:09.262060 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9","Type":"ContainerStarted","Data":"f8b6c6fe42e418102c22d90848a1a7ceb4e3b2ac51180117077ee3bfe74a5367"} Jan 21 17:55:09 crc kubenswrapper[4799]: I0121 17:55:09.363033 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 21 17:55:09 crc kubenswrapper[4799]: I0121 17:55:09.363502 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 21 17:55:09 crc kubenswrapper[4799]: I0121 17:55:09.368862 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 21 17:55:09 crc kubenswrapper[4799]: I0121 17:55:09.372300 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.220331 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30d79297-b2b1-4eb9-9d7c-97069febc7df" path="/var/lib/kubelet/pods/30d79297-b2b1-4eb9-9d7c-97069febc7df/volumes" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.280891 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9","Type":"ContainerStarted","Data":"a145cb3ea77b65a1ae33622bd1c22075ba93017435d99076738b4b2fa70894df"} Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.281253 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.292196 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.321506 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.321485809 podStartE2EDuration="2.321485809s" podCreationTimestamp="2026-01-21 17:55:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:55:10.304352229 +0000 UTC m=+1336.930642262" watchObservedRunningTime="2026-01-21 17:55:10.321485809 +0000 UTC m=+1336.947775832" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.510033 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5dbd69cdbc-vhw4k"] Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.527935 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.540360 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5dbd69cdbc-vhw4k"] Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.681622 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-ovsdbserver-nb\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.681728 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-config\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.681764 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-dns-swift-storage-0\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.681782 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-ovsdbserver-sb\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.681821 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw6rn\" (UniqueName: \"kubernetes.io/projected/82c5bc2f-a942-452f-9904-825ee865bee7-kube-api-access-vw6rn\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.681857 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-dns-svc\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.783597 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-ovsdbserver-nb\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.783722 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-config\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.783756 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-dns-swift-storage-0\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.783781 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-ovsdbserver-sb\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.783830 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vw6rn\" (UniqueName: \"kubernetes.io/projected/82c5bc2f-a942-452f-9904-825ee865bee7-kube-api-access-vw6rn\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.783873 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-dns-svc\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.784853 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-dns-svc\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.785398 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-ovsdbserver-nb\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.785909 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-config\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.786446 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-dns-swift-storage-0\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.786948 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-ovsdbserver-sb\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.819875 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw6rn\" (UniqueName: 
\"kubernetes.io/projected/82c5bc2f-a942-452f-9904-825ee865bee7-kube-api-access-vw6rn\") pod \"dnsmasq-dns-5dbd69cdbc-vhw4k\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:10 crc kubenswrapper[4799]: I0121 17:55:10.853351 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:11 crc kubenswrapper[4799]: I0121 17:55:11.323528 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5dbd69cdbc-vhw4k"] Jan 21 17:55:12 crc kubenswrapper[4799]: I0121 17:55:12.308250 4799 generic.go:334] "Generic (PLEG): container finished" podID="82c5bc2f-a942-452f-9904-825ee865bee7" containerID="4ce41ac713a33a886a720efb9e85c805178b172844ea5fa6d59095971cb434da" exitCode=0 Jan 21 17:55:12 crc kubenswrapper[4799]: I0121 17:55:12.308342 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" event={"ID":"82c5bc2f-a942-452f-9904-825ee865bee7","Type":"ContainerDied","Data":"4ce41ac713a33a886a720efb9e85c805178b172844ea5fa6d59095971cb434da"} Jan 21 17:55:12 crc kubenswrapper[4799]: I0121 17:55:12.308618 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" event={"ID":"82c5bc2f-a942-452f-9904-825ee865bee7","Type":"ContainerStarted","Data":"7838c8b52718de434a27478284b951ffc3e81b3431d82fb149059c75392ff9b4"} Jan 21 17:55:13 crc kubenswrapper[4799]: I0121 17:55:13.155982 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:55:13 crc kubenswrapper[4799]: I0121 17:55:13.157301 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="ceilometer-central-agent" containerID="cri-o://6738287bddbaf0a41b8381e824d218575ef877c74badb99845415c8786f2cd7c" gracePeriod=30 Jan 21 17:55:13 crc kubenswrapper[4799]: I0121 17:55:13.157364 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="proxy-httpd" containerID="cri-o://1f868879eb3407085cc889ceabaddc4c5f09612e1757a2452abb1893e0fad878" gracePeriod=30 Jan 21 17:55:13 crc kubenswrapper[4799]: I0121 17:55:13.157414 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="sg-core" containerID="cri-o://e589326dc82b2741fba075b192f75b10504ab93b8983b3297975219f2985660b" gracePeriod=30 Jan 21 17:55:13 crc kubenswrapper[4799]: I0121 17:55:13.157491 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="ceilometer-notification-agent" containerID="cri-o://e1ee834cfcec2ccdec730b3681d1ea54e39b1a1ab5303f8721bfef6f8acce1f7" gracePeriod=30 Jan 21 17:55:13 crc kubenswrapper[4799]: I0121 17:55:13.166739 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.215:3000/\": EOF" Jan 21 17:55:13 crc kubenswrapper[4799]: I0121 17:55:13.319432 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" 
event={"ID":"82c5bc2f-a942-452f-9904-825ee865bee7","Type":"ContainerStarted","Data":"8257368547dac2a98a11908961d8e956daf1070dcfa3a74cb1275428ab2cc9fb"} Jan 21 17:55:13 crc kubenswrapper[4799]: I0121 17:55:13.320820 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 21 17:55:13 crc kubenswrapper[4799]: I0121 17:55:13.322210 4799 generic.go:334] "Generic (PLEG): container finished" podID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerID="1f868879eb3407085cc889ceabaddc4c5f09612e1757a2452abb1893e0fad878" exitCode=0 Jan 21 17:55:13 crc kubenswrapper[4799]: I0121 17:55:13.322285 4799 generic.go:334] "Generic (PLEG): container finished" podID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerID="e589326dc82b2741fba075b192f75b10504ab93b8983b3297975219f2985660b" exitCode=2 Jan 21 17:55:13 crc kubenswrapper[4799]: I0121 17:55:13.322302 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08e73474-1501-44e2-a1f3-d53c149c2ed8","Type":"ContainerDied","Data":"1f868879eb3407085cc889ceabaddc4c5f09612e1757a2452abb1893e0fad878"} Jan 21 17:55:13 crc kubenswrapper[4799]: I0121 17:55:13.322354 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08e73474-1501-44e2-a1f3-d53c149c2ed8","Type":"ContainerDied","Data":"e589326dc82b2741fba075b192f75b10504ab93b8983b3297975219f2985660b"} Jan 21 17:55:13 crc kubenswrapper[4799]: I0121 17:55:13.322467 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" containerName="nova-api-log" containerID="cri-o://e3e305e4e777365b0da2214aee0ad6ed3731fb5d17755827239aacc2c2662e6b" gracePeriod=30 Jan 21 17:55:13 crc kubenswrapper[4799]: I0121 17:55:13.322601 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" containerName="nova-api-api" containerID="cri-o://bfc8d191eea35285a31160c94103323c9bd2d3e4784874a3d7e30c1885da4c2b" gracePeriod=30 Jan 21 17:55:13 crc kubenswrapper[4799]: I0121 17:55:13.363393 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" podStartSLOduration=3.36336694 podStartE2EDuration="3.36336694s" podCreationTimestamp="2026-01-21 17:55:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:55:13.348292147 +0000 UTC m=+1339.974582170" watchObservedRunningTime="2026-01-21 17:55:13.36336694 +0000 UTC m=+1339.989656953" Jan 21 17:55:13 crc kubenswrapper[4799]: I0121 17:55:13.664425 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:14 crc kubenswrapper[4799]: I0121 17:55:14.336086 4799 generic.go:334] "Generic (PLEG): container finished" podID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerID="6738287bddbaf0a41b8381e824d218575ef877c74badb99845415c8786f2cd7c" exitCode=0 Jan 21 17:55:14 crc kubenswrapper[4799]: I0121 17:55:14.336185 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08e73474-1501-44e2-a1f3-d53c149c2ed8","Type":"ContainerDied","Data":"6738287bddbaf0a41b8381e824d218575ef877c74badb99845415c8786f2cd7c"} Jan 21 17:55:14 crc kubenswrapper[4799]: I0121 17:55:14.338940 4799 generic.go:334] "Generic (PLEG): container finished" podID="73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" 
containerID="e3e305e4e777365b0da2214aee0ad6ed3731fb5d17755827239aacc2c2662e6b" exitCode=143 Jan 21 17:55:14 crc kubenswrapper[4799]: I0121 17:55:14.339025 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357","Type":"ContainerDied","Data":"e3e305e4e777365b0da2214aee0ad6ed3731fb5d17755827239aacc2c2662e6b"} Jan 21 17:55:14 crc kubenswrapper[4799]: I0121 17:55:14.339310 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.381343 4799 generic.go:334] "Generic (PLEG): container finished" podID="73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" containerID="bfc8d191eea35285a31160c94103323c9bd2d3e4784874a3d7e30c1885da4c2b" exitCode=0 Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.381710 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357","Type":"ContainerDied","Data":"bfc8d191eea35285a31160c94103323c9bd2d3e4784874a3d7e30c1885da4c2b"} Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.393795 4799 generic.go:334] "Generic (PLEG): container finished" podID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerID="e1ee834cfcec2ccdec730b3681d1ea54e39b1a1ab5303f8721bfef6f8acce1f7" exitCode=0 Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.394921 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08e73474-1501-44e2-a1f3-d53c149c2ed8","Type":"ContainerDied","Data":"e1ee834cfcec2ccdec730b3681d1ea54e39b1a1ab5303f8721bfef6f8acce1f7"} Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.394948 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08e73474-1501-44e2-a1f3-d53c149c2ed8","Type":"ContainerDied","Data":"9cbbc41ffad7d0be8e96a0ef69095f0396c2cd6e771eb77df7990de8fdc0256f"} Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.394961 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9cbbc41ffad7d0be8e96a0ef69095f0396c2cd6e771eb77df7990de8fdc0256f" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.462598 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.600409 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.616461 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08e73474-1501-44e2-a1f3-d53c149c2ed8-log-httpd\") pod \"08e73474-1501-44e2-a1f3-d53c149c2ed8\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.616538 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-sg-core-conf-yaml\") pod \"08e73474-1501-44e2-a1f3-d53c149c2ed8\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.616597 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dqvh\" (UniqueName: \"kubernetes.io/projected/08e73474-1501-44e2-a1f3-d53c149c2ed8-kube-api-access-4dqvh\") pod \"08e73474-1501-44e2-a1f3-d53c149c2ed8\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.616691 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-combined-ca-bundle\") pod \"08e73474-1501-44e2-a1f3-d53c149c2ed8\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.616743 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08e73474-1501-44e2-a1f3-d53c149c2ed8-run-httpd\") pod \"08e73474-1501-44e2-a1f3-d53c149c2ed8\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.616800 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-scripts\") pod \"08e73474-1501-44e2-a1f3-d53c149c2ed8\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.616920 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-ceilometer-tls-certs\") pod \"08e73474-1501-44e2-a1f3-d53c149c2ed8\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.616946 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08e73474-1501-44e2-a1f3-d53c149c2ed8-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "08e73474-1501-44e2-a1f3-d53c149c2ed8" (UID: "08e73474-1501-44e2-a1f3-d53c149c2ed8"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.617028 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-config-data\") pod \"08e73474-1501-44e2-a1f3-d53c149c2ed8\" (UID: \"08e73474-1501-44e2-a1f3-d53c149c2ed8\") " Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.617466 4799 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08e73474-1501-44e2-a1f3-d53c149c2ed8-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.617444 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08e73474-1501-44e2-a1f3-d53c149c2ed8-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "08e73474-1501-44e2-a1f3-d53c149c2ed8" (UID: "08e73474-1501-44e2-a1f3-d53c149c2ed8"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.631611 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08e73474-1501-44e2-a1f3-d53c149c2ed8-kube-api-access-4dqvh" (OuterVolumeSpecName: "kube-api-access-4dqvh") pod "08e73474-1501-44e2-a1f3-d53c149c2ed8" (UID: "08e73474-1501-44e2-a1f3-d53c149c2ed8"). InnerVolumeSpecName "kube-api-access-4dqvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.659364 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-scripts" (OuterVolumeSpecName: "scripts") pod "08e73474-1501-44e2-a1f3-d53c149c2ed8" (UID: "08e73474-1501-44e2-a1f3-d53c149c2ed8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.680162 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "08e73474-1501-44e2-a1f3-d53c149c2ed8" (UID: "08e73474-1501-44e2-a1f3-d53c149c2ed8"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.719370 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-combined-ca-bundle\") pod \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\" (UID: \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\") " Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.720157 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-828pq\" (UniqueName: \"kubernetes.io/projected/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-kube-api-access-828pq\") pod \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\" (UID: \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\") " Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.720369 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-config-data\") pod \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\" (UID: \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\") " Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.721250 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-logs\") pod \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\" (UID: \"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357\") " Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.721779 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "08e73474-1501-44e2-a1f3-d53c149c2ed8" (UID: "08e73474-1501-44e2-a1f3-d53c149c2ed8"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.722098 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-logs" (OuterVolumeSpecName: "logs") pod "73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" (UID: "73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.722650 4799 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.722732 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dqvh\" (UniqueName: \"kubernetes.io/projected/08e73474-1501-44e2-a1f3-d53c149c2ed8-kube-api-access-4dqvh\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.722810 4799 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08e73474-1501-44e2-a1f3-d53c149c2ed8-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.722877 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.722945 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.723005 4799 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.724661 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-kube-api-access-828pq" (OuterVolumeSpecName: "kube-api-access-828pq") pod "73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" (UID: "73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357"). InnerVolumeSpecName "kube-api-access-828pq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.741385 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "08e73474-1501-44e2-a1f3-d53c149c2ed8" (UID: "08e73474-1501-44e2-a1f3-d53c149c2ed8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.754462 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" (UID: "73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.754485 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-config-data" (OuterVolumeSpecName: "config-data") pod "73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" (UID: "73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.778214 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-config-data" (OuterVolumeSpecName: "config-data") pod "08e73474-1501-44e2-a1f3-d53c149c2ed8" (UID: "08e73474-1501-44e2-a1f3-d53c149c2ed8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.825361 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.825405 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-828pq\" (UniqueName: \"kubernetes.io/projected/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-kube-api-access-828pq\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.825420 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.825428 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:15 crc kubenswrapper[4799]: I0121 17:55:15.825438 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08e73474-1501-44e2-a1f3-d53c149c2ed8-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.405861 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.405881 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357","Type":"ContainerDied","Data":"bcafccd5b5a62f040428a51ffff1b372ec3fc39fc6ebe7b145c0178628ceced1"} Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.405882 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.405997 4799 scope.go:117] "RemoveContainer" containerID="bfc8d191eea35285a31160c94103323c9bd2d3e4784874a3d7e30c1885da4c2b" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.431535 4799 scope.go:117] "RemoveContainer" containerID="e3e305e4e777365b0da2214aee0ad6ed3731fb5d17755827239aacc2c2662e6b" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.466947 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.491950 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.508261 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.517321 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.544648 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:55:16 crc kubenswrapper[4799]: E0121 17:55:16.545259 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" containerName="nova-api-log" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.545282 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" containerName="nova-api-log" Jan 21 17:55:16 crc kubenswrapper[4799]: E0121 17:55:16.545328 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" containerName="nova-api-api" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.545337 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" containerName="nova-api-api" Jan 21 17:55:16 crc kubenswrapper[4799]: E0121 17:55:16.545355 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="ceilometer-central-agent" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.545363 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="ceilometer-central-agent" Jan 21 17:55:16 crc kubenswrapper[4799]: E0121 17:55:16.545379 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="proxy-httpd" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.545386 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="proxy-httpd" Jan 21 17:55:16 crc kubenswrapper[4799]: E0121 17:55:16.545400 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="sg-core" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.545407 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="sg-core" Jan 21 17:55:16 crc kubenswrapper[4799]: E0121 17:55:16.545418 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="ceilometer-notification-agent" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.545426 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="ceilometer-notification-agent" Jan 21 17:55:16 
crc kubenswrapper[4799]: I0121 17:55:16.545646 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="ceilometer-notification-agent" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.545680 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="ceilometer-central-agent" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.545702 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="sg-core" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.545710 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" containerName="nova-api-log" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.545724 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" containerName="nova-api-api" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.545740 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" containerName="proxy-httpd" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.548019 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.550394 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.550777 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.551371 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.560177 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.570758 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.576960 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.579734 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.579998 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.580187 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.582736 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.656312 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04163938-d340-4731-82c4-e01a636b7df2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.656409 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04163938-d340-4731-82c4-e01a636b7df2-scripts\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.656445 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/04163938-d340-4731-82c4-e01a636b7df2-log-httpd\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.656492 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04163938-d340-4731-82c4-e01a636b7df2-config-data\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.656524 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dq8j\" (UniqueName: \"kubernetes.io/projected/04163938-d340-4731-82c4-e01a636b7df2-kube-api-access-2dq8j\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.656547 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/04163938-d340-4731-82c4-e01a636b7df2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.656609 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/04163938-d340-4731-82c4-e01a636b7df2-run-httpd\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.656703 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/04163938-d340-4731-82c4-e01a636b7df2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.757955 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-public-tls-certs\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.758299 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/04163938-d340-4731-82c4-e01a636b7df2-run-httpd\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.758355 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/04163938-d340-4731-82c4-e01a636b7df2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.758406 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-config-data\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.758425 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-internal-tls-certs\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.758459 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/857bdfa7-ef8d-407d-ad68-3d401fad4c43-logs\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.758486 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.758507 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t59fr\" (UniqueName: \"kubernetes.io/projected/857bdfa7-ef8d-407d-ad68-3d401fad4c43-kube-api-access-t59fr\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.758532 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04163938-d340-4731-82c4-e01a636b7df2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.758570 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04163938-d340-4731-82c4-e01a636b7df2-scripts\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.758593 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/04163938-d340-4731-82c4-e01a636b7df2-log-httpd\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.758626 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04163938-d340-4731-82c4-e01a636b7df2-config-data\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.758649 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dq8j\" (UniqueName: \"kubernetes.io/projected/04163938-d340-4731-82c4-e01a636b7df2-kube-api-access-2dq8j\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.758673 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/04163938-d340-4731-82c4-e01a636b7df2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.759188 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/04163938-d340-4731-82c4-e01a636b7df2-run-httpd\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.760260 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/04163938-d340-4731-82c4-e01a636b7df2-log-httpd\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.765927 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04163938-d340-4731-82c4-e01a636b7df2-scripts\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.767056 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04163938-d340-4731-82c4-e01a636b7df2-config-data\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.770548 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/04163938-d340-4731-82c4-e01a636b7df2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.770923 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/04163938-d340-4731-82c4-e01a636b7df2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.772625 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04163938-d340-4731-82c4-e01a636b7df2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.789075 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dq8j\" (UniqueName: \"kubernetes.io/projected/04163938-d340-4731-82c4-e01a636b7df2-kube-api-access-2dq8j\") pod \"ceilometer-0\" (UID: \"04163938-d340-4731-82c4-e01a636b7df2\") " pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.860143 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-public-tls-certs\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.860498 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-config-data\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.860778 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-internal-tls-certs\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.861471 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/857bdfa7-ef8d-407d-ad68-3d401fad4c43-logs\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.861591 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.861674 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t59fr\" (UniqueName: \"kubernetes.io/projected/857bdfa7-ef8d-407d-ad68-3d401fad4c43-kube-api-access-t59fr\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.862047 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/857bdfa7-ef8d-407d-ad68-3d401fad4c43-logs\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.866328 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.866953 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-public-tls-certs\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.867260 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-config-data\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.874648 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-internal-tls-certs\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.877840 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.886103 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t59fr\" (UniqueName: \"kubernetes.io/projected/857bdfa7-ef8d-407d-ad68-3d401fad4c43-kube-api-access-t59fr\") pod \"nova-api-0\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " pod="openstack/nova-api-0" Jan 21 17:55:16 crc kubenswrapper[4799]: I0121 17:55:16.898371 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 21 17:55:17 crc kubenswrapper[4799]: I0121 17:55:17.396032 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 21 17:55:17 crc kubenswrapper[4799]: I0121 17:55:17.419109 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"04163938-d340-4731-82c4-e01a636b7df2","Type":"ContainerStarted","Data":"0e270556cf9559a058d879eee73b116aad101ddfb8d633e2cb0da1b58b2ea8b7"} Jan 21 17:55:17 crc kubenswrapper[4799]: I0121 17:55:17.459303 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 21 17:55:18 crc kubenswrapper[4799]: I0121 17:55:18.219390 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08e73474-1501-44e2-a1f3-d53c149c2ed8" path="/var/lib/kubelet/pods/08e73474-1501-44e2-a1f3-d53c149c2ed8/volumes" Jan 21 17:55:18 crc kubenswrapper[4799]: I0121 17:55:18.220772 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357" path="/var/lib/kubelet/pods/73f7fb3c-0d3f-4e84-8def-7a0e2bbb9357/volumes" Jan 21 17:55:18 crc kubenswrapper[4799]: I0121 17:55:18.435663 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"04163938-d340-4731-82c4-e01a636b7df2","Type":"ContainerStarted","Data":"cb75db54cf93256c7750cff9d5228c757d2aac1166003fa517c7f2017fd76ed0"} Jan 21 17:55:18 crc kubenswrapper[4799]: I0121 17:55:18.435753 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"04163938-d340-4731-82c4-e01a636b7df2","Type":"ContainerStarted","Data":"9547134e298ce034b8358583020fc4f89c2cbff5ebd8edf77ca5b1e22c368833"} Jan 21 17:55:18 crc kubenswrapper[4799]: I0121 17:55:18.440055 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"857bdfa7-ef8d-407d-ad68-3d401fad4c43","Type":"ContainerStarted","Data":"aae77d3910f9679fb9cfe0f6bc1a13611d0f2f25169a578bfebd7d306dcf34f6"} Jan 21 17:55:18 crc kubenswrapper[4799]: I0121 17:55:18.440087 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"857bdfa7-ef8d-407d-ad68-3d401fad4c43","Type":"ContainerStarted","Data":"88b841e5f3b7df4a90393a9457991f4f0ee992486d88a2944fd6761f9bd4a5af"} Jan 21 17:55:18 crc kubenswrapper[4799]: I0121 17:55:18.440101 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"857bdfa7-ef8d-407d-ad68-3d401fad4c43","Type":"ContainerStarted","Data":"f5eff417197244cdb084b04482a5a56fe3a8efa29d677456826c56de9b7f8007"} Jan 21 17:55:18 crc kubenswrapper[4799]: I0121 17:55:18.467063 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.467044747 podStartE2EDuration="2.467044747s" podCreationTimestamp="2026-01-21 17:55:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:55:18.464781934 +0000 UTC m=+1345.091071957" watchObservedRunningTime="2026-01-21 17:55:18.467044747 +0000 UTC m=+1345.093334760" Jan 21 17:55:18 crc kubenswrapper[4799]: I0121 17:55:18.664623 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:18 crc kubenswrapper[4799]: I0121 17:55:18.683413 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Jan 21 
17:55:19 crc kubenswrapper[4799]: I0121 17:55:19.457616 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"04163938-d340-4731-82c4-e01a636b7df2","Type":"ContainerStarted","Data":"d1f4419c1628d809c3da7d7da9208abda3894958670f3d690cc4495adeba5bdf"} Jan 21 17:55:19 crc kubenswrapper[4799]: I0121 17:55:19.577910 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Jan 21 17:55:19 crc kubenswrapper[4799]: I0121 17:55:19.816620 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-24qlw"] Jan 21 17:55:19 crc kubenswrapper[4799]: I0121 17:55:19.817955 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-24qlw" Jan 21 17:55:19 crc kubenswrapper[4799]: I0121 17:55:19.820046 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Jan 21 17:55:19 crc kubenswrapper[4799]: I0121 17:55:19.821163 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Jan 21 17:55:19 crc kubenswrapper[4799]: I0121 17:55:19.840420 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-24qlw"] Jan 21 17:55:19 crc kubenswrapper[4799]: I0121 17:55:19.933802 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-24qlw\" (UID: \"1faad361-9dfd-4168-aaa4-626082473a62\") " pod="openstack/nova-cell1-cell-mapping-24qlw" Jan 21 17:55:19 crc kubenswrapper[4799]: I0121 17:55:19.934150 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-scripts\") pod \"nova-cell1-cell-mapping-24qlw\" (UID: \"1faad361-9dfd-4168-aaa4-626082473a62\") " pod="openstack/nova-cell1-cell-mapping-24qlw" Jan 21 17:55:19 crc kubenswrapper[4799]: I0121 17:55:19.934308 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-config-data\") pod \"nova-cell1-cell-mapping-24qlw\" (UID: \"1faad361-9dfd-4168-aaa4-626082473a62\") " pod="openstack/nova-cell1-cell-mapping-24qlw" Jan 21 17:55:19 crc kubenswrapper[4799]: I0121 17:55:19.934534 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9jdj\" (UniqueName: \"kubernetes.io/projected/1faad361-9dfd-4168-aaa4-626082473a62-kube-api-access-j9jdj\") pod \"nova-cell1-cell-mapping-24qlw\" (UID: \"1faad361-9dfd-4168-aaa4-626082473a62\") " pod="openstack/nova-cell1-cell-mapping-24qlw" Jan 21 17:55:20 crc kubenswrapper[4799]: I0121 17:55:20.037280 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-24qlw\" (UID: \"1faad361-9dfd-4168-aaa4-626082473a62\") " pod="openstack/nova-cell1-cell-mapping-24qlw" Jan 21 17:55:20 crc kubenswrapper[4799]: I0121 17:55:20.037370 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-scripts\") pod \"nova-cell1-cell-mapping-24qlw\" (UID: \"1faad361-9dfd-4168-aaa4-626082473a62\") " pod="openstack/nova-cell1-cell-mapping-24qlw" Jan 21 17:55:20 crc kubenswrapper[4799]: I0121 17:55:20.037423 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-config-data\") pod \"nova-cell1-cell-mapping-24qlw\" (UID: \"1faad361-9dfd-4168-aaa4-626082473a62\") " pod="openstack/nova-cell1-cell-mapping-24qlw" Jan 21 17:55:20 crc kubenswrapper[4799]: I0121 17:55:20.037468 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9jdj\" (UniqueName: \"kubernetes.io/projected/1faad361-9dfd-4168-aaa4-626082473a62-kube-api-access-j9jdj\") pod \"nova-cell1-cell-mapping-24qlw\" (UID: \"1faad361-9dfd-4168-aaa4-626082473a62\") " pod="openstack/nova-cell1-cell-mapping-24qlw" Jan 21 17:55:20 crc kubenswrapper[4799]: I0121 17:55:20.048985 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-24qlw\" (UID: \"1faad361-9dfd-4168-aaa4-626082473a62\") " pod="openstack/nova-cell1-cell-mapping-24qlw" Jan 21 17:55:20 crc kubenswrapper[4799]: I0121 17:55:20.053460 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-scripts\") pod \"nova-cell1-cell-mapping-24qlw\" (UID: \"1faad361-9dfd-4168-aaa4-626082473a62\") " pod="openstack/nova-cell1-cell-mapping-24qlw" Jan 21 17:55:20 crc kubenswrapper[4799]: I0121 17:55:20.055569 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9jdj\" (UniqueName: \"kubernetes.io/projected/1faad361-9dfd-4168-aaa4-626082473a62-kube-api-access-j9jdj\") pod \"nova-cell1-cell-mapping-24qlw\" (UID: \"1faad361-9dfd-4168-aaa4-626082473a62\") " pod="openstack/nova-cell1-cell-mapping-24qlw" Jan 21 17:55:20 crc kubenswrapper[4799]: I0121 17:55:20.061792 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-config-data\") pod \"nova-cell1-cell-mapping-24qlw\" (UID: \"1faad361-9dfd-4168-aaa4-626082473a62\") " pod="openstack/nova-cell1-cell-mapping-24qlw" Jan 21 17:55:20 crc kubenswrapper[4799]: I0121 17:55:20.147104 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-24qlw" Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:20.470442 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"04163938-d340-4731-82c4-e01a636b7df2","Type":"ContainerStarted","Data":"96d4c171d2068ef43bf3f697bd94ed0180b5a864d8d895e81d45bb19ddca5017"} Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:20.471049 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:20.516454 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.261302187 podStartE2EDuration="4.516438716s" podCreationTimestamp="2026-01-21 17:55:16 +0000 UTC" firstStartedPulling="2026-01-21 17:55:17.404049979 +0000 UTC m=+1344.030340002" lastFinishedPulling="2026-01-21 17:55:19.659186508 +0000 UTC m=+1346.285476531" observedRunningTime="2026-01-21 17:55:20.51301305 +0000 UTC m=+1347.139303073" watchObservedRunningTime="2026-01-21 17:55:20.516438716 +0000 UTC m=+1347.142728739" Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:20.663435 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-24qlw"] Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:20.855356 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.081101 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-566f9f46c9-qqp64"] Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.081772 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" podUID="cbe752ec-bf40-44cc-b1df-c3d7e2acc01c" containerName="dnsmasq-dns" containerID="cri-o://6d4a59a84df18cc8dddecc6170657feb25ce09ebb8106c817a62db9e46a34a40" gracePeriod=10 Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.170846 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" podUID="cbe752ec-bf40-44cc-b1df-c3d7e2acc01c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.207:5353: connect: connection refused" Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.481019 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-24qlw" event={"ID":"1faad361-9dfd-4168-aaa4-626082473a62","Type":"ContainerStarted","Data":"8d246599ecd08657078b8e4170b654092c811f5122d25bfbbdfdedfcf16646ef"} Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.481073 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-24qlw" event={"ID":"1faad361-9dfd-4168-aaa4-626082473a62","Type":"ContainerStarted","Data":"06a6c4b950e0608957246e2561d1d9cc23aa27c7ba1ead4ac9b30c8217f1db4b"} Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.486900 4799 generic.go:334] "Generic (PLEG): container finished" podID="cbe752ec-bf40-44cc-b1df-c3d7e2acc01c" containerID="6d4a59a84df18cc8dddecc6170657feb25ce09ebb8106c817a62db9e46a34a40" exitCode=0 Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.486951 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" event={"ID":"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c","Type":"ContainerDied","Data":"6d4a59a84df18cc8dddecc6170657feb25ce09ebb8106c817a62db9e46a34a40"} Jan 21 17:55:21 
crc kubenswrapper[4799]: I0121 17:55:21.511857 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-24qlw" podStartSLOduration=2.5118375090000002 podStartE2EDuration="2.511837509s" podCreationTimestamp="2026-01-21 17:55:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:55:21.499313908 +0000 UTC m=+1348.125603941" watchObservedRunningTime="2026-01-21 17:55:21.511837509 +0000 UTC m=+1348.138127532" Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.736907 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.900680 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-ovsdbserver-sb\") pod \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.900974 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-dns-swift-storage-0\") pod \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.901148 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-config\") pod \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.901286 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-ovsdbserver-nb\") pod \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.901384 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h52cw\" (UniqueName: \"kubernetes.io/projected/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-kube-api-access-h52cw\") pod \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.901495 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-dns-svc\") pod \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\" (UID: \"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c\") " Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.907608 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-kube-api-access-h52cw" (OuterVolumeSpecName: "kube-api-access-h52cw") pod "cbe752ec-bf40-44cc-b1df-c3d7e2acc01c" (UID: "cbe752ec-bf40-44cc-b1df-c3d7e2acc01c"). InnerVolumeSpecName "kube-api-access-h52cw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.992168 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cbe752ec-bf40-44cc-b1df-c3d7e2acc01c" (UID: "cbe752ec-bf40-44cc-b1df-c3d7e2acc01c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:55:21 crc kubenswrapper[4799]: I0121 17:55:21.993926 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cbe752ec-bf40-44cc-b1df-c3d7e2acc01c" (UID: "cbe752ec-bf40-44cc-b1df-c3d7e2acc01c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:55:22 crc kubenswrapper[4799]: I0121 17:55:22.007635 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:22 crc kubenswrapper[4799]: I0121 17:55:22.007666 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h52cw\" (UniqueName: \"kubernetes.io/projected/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-kube-api-access-h52cw\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:22 crc kubenswrapper[4799]: I0121 17:55:22.007678 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:22 crc kubenswrapper[4799]: I0121 17:55:22.015762 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "cbe752ec-bf40-44cc-b1df-c3d7e2acc01c" (UID: "cbe752ec-bf40-44cc-b1df-c3d7e2acc01c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:55:22 crc kubenswrapper[4799]: I0121 17:55:22.016656 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cbe752ec-bf40-44cc-b1df-c3d7e2acc01c" (UID: "cbe752ec-bf40-44cc-b1df-c3d7e2acc01c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:55:22 crc kubenswrapper[4799]: I0121 17:55:22.029262 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-config" (OuterVolumeSpecName: "config") pod "cbe752ec-bf40-44cc-b1df-c3d7e2acc01c" (UID: "cbe752ec-bf40-44cc-b1df-c3d7e2acc01c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:55:22 crc kubenswrapper[4799]: I0121 17:55:22.113655 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:22 crc kubenswrapper[4799]: I0121 17:55:22.113695 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:22 crc kubenswrapper[4799]: I0121 17:55:22.113705 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:22 crc kubenswrapper[4799]: I0121 17:55:22.502292 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" Jan 21 17:55:22 crc kubenswrapper[4799]: I0121 17:55:22.502324 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-566f9f46c9-qqp64" event={"ID":"cbe752ec-bf40-44cc-b1df-c3d7e2acc01c","Type":"ContainerDied","Data":"a5260133e461b86b6e3056ef07646624676a925afae4d955d37ae123b395ba4d"} Jan 21 17:55:22 crc kubenswrapper[4799]: I0121 17:55:22.502679 4799 scope.go:117] "RemoveContainer" containerID="6d4a59a84df18cc8dddecc6170657feb25ce09ebb8106c817a62db9e46a34a40" Jan 21 17:55:22 crc kubenswrapper[4799]: I0121 17:55:22.526372 4799 scope.go:117] "RemoveContainer" containerID="7e978663da3983a470fa4ce6f971f373759a5d234e2561fc446133dd6f35ab6d" Jan 21 17:55:22 crc kubenswrapper[4799]: I0121 17:55:22.543816 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-566f9f46c9-qqp64"] Jan 21 17:55:22 crc kubenswrapper[4799]: I0121 17:55:22.551574 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-566f9f46c9-qqp64"] Jan 21 17:55:24 crc kubenswrapper[4799]: I0121 17:55:24.220285 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbe752ec-bf40-44cc-b1df-c3d7e2acc01c" path="/var/lib/kubelet/pods/cbe752ec-bf40-44cc-b1df-c3d7e2acc01c/volumes" Jan 21 17:55:26 crc kubenswrapper[4799]: I0121 17:55:26.898600 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 21 17:55:26 crc kubenswrapper[4799]: I0121 17:55:26.898909 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 21 17:55:27 crc kubenswrapper[4799]: I0121 17:55:27.569261 4799 generic.go:334] "Generic (PLEG): container finished" podID="1faad361-9dfd-4168-aaa4-626082473a62" containerID="8d246599ecd08657078b8e4170b654092c811f5122d25bfbbdfdedfcf16646ef" exitCode=0 Jan 21 17:55:27 crc kubenswrapper[4799]: I0121 17:55:27.569361 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-24qlw" event={"ID":"1faad361-9dfd-4168-aaa4-626082473a62","Type":"ContainerDied","Data":"8d246599ecd08657078b8e4170b654092c811f5122d25bfbbdfdedfcf16646ef"} Jan 21 17:55:27 crc kubenswrapper[4799]: I0121 17:55:27.916315 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="857bdfa7-ef8d-407d-ad68-3d401fad4c43" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.219:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting 
headers)" Jan 21 17:55:27 crc kubenswrapper[4799]: I0121 17:55:27.916365 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="857bdfa7-ef8d-407d-ad68-3d401fad4c43" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.219:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.000024 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-24qlw" Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.109767 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-config-data\") pod \"1faad361-9dfd-4168-aaa4-626082473a62\" (UID: \"1faad361-9dfd-4168-aaa4-626082473a62\") " Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.109923 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-scripts\") pod \"1faad361-9dfd-4168-aaa4-626082473a62\" (UID: \"1faad361-9dfd-4168-aaa4-626082473a62\") " Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.109982 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9jdj\" (UniqueName: \"kubernetes.io/projected/1faad361-9dfd-4168-aaa4-626082473a62-kube-api-access-j9jdj\") pod \"1faad361-9dfd-4168-aaa4-626082473a62\" (UID: \"1faad361-9dfd-4168-aaa4-626082473a62\") " Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.110281 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-combined-ca-bundle\") pod \"1faad361-9dfd-4168-aaa4-626082473a62\" (UID: \"1faad361-9dfd-4168-aaa4-626082473a62\") " Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.117627 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1faad361-9dfd-4168-aaa4-626082473a62-kube-api-access-j9jdj" (OuterVolumeSpecName: "kube-api-access-j9jdj") pod "1faad361-9dfd-4168-aaa4-626082473a62" (UID: "1faad361-9dfd-4168-aaa4-626082473a62"). InnerVolumeSpecName "kube-api-access-j9jdj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.125288 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-scripts" (OuterVolumeSpecName: "scripts") pod "1faad361-9dfd-4168-aaa4-626082473a62" (UID: "1faad361-9dfd-4168-aaa4-626082473a62"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.183320 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1faad361-9dfd-4168-aaa4-626082473a62" (UID: "1faad361-9dfd-4168-aaa4-626082473a62"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.195212 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-config-data" (OuterVolumeSpecName: "config-data") pod "1faad361-9dfd-4168-aaa4-626082473a62" (UID: "1faad361-9dfd-4168-aaa4-626082473a62"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.212587 4799 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-scripts\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.212637 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9jdj\" (UniqueName: \"kubernetes.io/projected/1faad361-9dfd-4168-aaa4-626082473a62-kube-api-access-j9jdj\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.212675 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.212685 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1faad361-9dfd-4168-aaa4-626082473a62-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.600862 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-24qlw" event={"ID":"1faad361-9dfd-4168-aaa4-626082473a62","Type":"ContainerDied","Data":"06a6c4b950e0608957246e2561d1d9cc23aa27c7ba1ead4ac9b30c8217f1db4b"} Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.601239 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06a6c4b950e0608957246e2561d1d9cc23aa27c7ba1ead4ac9b30c8217f1db4b" Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.600969 4799 util.go:48] "No ready sandbox for pod can be found. 
Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.785233 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.785586 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d6cb4b02-7469-4b56-9bc8-ae205587439c" containerName="nova-scheduler-scheduler" containerID="cri-o://068bf8bb281f75e4be692955ecab11348760b6f46ff626802e22f1a182ec48c5" gracePeriod=30
Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.805902 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.806593 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="857bdfa7-ef8d-407d-ad68-3d401fad4c43" containerName="nova-api-log" containerID="cri-o://88b841e5f3b7df4a90393a9457991f4f0ee992486d88a2944fd6761f9bd4a5af" gracePeriod=30
Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.806664 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="857bdfa7-ef8d-407d-ad68-3d401fad4c43" containerName="nova-api-api" containerID="cri-o://aae77d3910f9679fb9cfe0f6bc1a13611d0f2f25169a578bfebd7d306dcf34f6" gracePeriod=30
Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.837715 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.837969 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="423606aa-ec3f-4223-a607-b88f5c132e91" containerName="nova-metadata-log" containerID="cri-o://29abfd725d357b561e6ed4a0cf9fef9ec7f9ba0f12e6e33a57293256762e2381" gracePeriod=30
Jan 21 17:55:29 crc kubenswrapper[4799]: I0121 17:55:29.838041 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="423606aa-ec3f-4223-a607-b88f5c132e91" containerName="nova-metadata-metadata" containerID="cri-o://21d712afc7e7f095d223324a51720ecff07a038f5c5035d421357a77be5de346" gracePeriod=30
Jan 21 17:55:30 crc kubenswrapper[4799]: I0121 17:55:30.612349 4799 generic.go:334] "Generic (PLEG): container finished" podID="423606aa-ec3f-4223-a607-b88f5c132e91" containerID="29abfd725d357b561e6ed4a0cf9fef9ec7f9ba0f12e6e33a57293256762e2381" exitCode=143
Jan 21 17:55:30 crc kubenswrapper[4799]: I0121 17:55:30.612441 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"423606aa-ec3f-4223-a607-b88f5c132e91","Type":"ContainerDied","Data":"29abfd725d357b561e6ed4a0cf9fef9ec7f9ba0f12e6e33a57293256762e2381"}
Jan 21 17:55:30 crc kubenswrapper[4799]: I0121 17:55:30.614613 4799 generic.go:334] "Generic (PLEG): container finished" podID="857bdfa7-ef8d-407d-ad68-3d401fad4c43" containerID="88b841e5f3b7df4a90393a9457991f4f0ee992486d88a2944fd6761f9bd4a5af" exitCode=143
Jan 21 17:55:30 crc kubenswrapper[4799]: I0121 17:55:30.614657 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"857bdfa7-ef8d-407d-ad68-3d401fad4c43","Type":"ContainerDied","Data":"88b841e5f3b7df4a90393a9457991f4f0ee992486d88a2944fd6761f9bd4a5af"}
Jan 21 17:55:30 crc kubenswrapper[4799]: E0121 17:55:30.627350 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="068bf8bb281f75e4be692955ecab11348760b6f46ff626802e22f1a182ec48c5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 21 17:55:30 crc kubenswrapper[4799]: E0121 17:55:30.629376 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="068bf8bb281f75e4be692955ecab11348760b6f46ff626802e22f1a182ec48c5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 21 17:55:30 crc kubenswrapper[4799]: E0121 17:55:30.630878 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="068bf8bb281f75e4be692955ecab11348760b6f46ff626802e22f1a182ec48c5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 21 17:55:30 crc kubenswrapper[4799]: E0121 17:55:30.630952 4799 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="d6cb4b02-7469-4b56-9bc8-ae205587439c" containerName="nova-scheduler-scheduler"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.233982 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.366934 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-combined-ca-bundle\") pod \"423606aa-ec3f-4223-a607-b88f5c132e91\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") "
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.367035 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-config-data\") pod \"423606aa-ec3f-4223-a607-b88f5c132e91\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") "
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.367076 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/423606aa-ec3f-4223-a607-b88f5c132e91-logs\") pod \"423606aa-ec3f-4223-a607-b88f5c132e91\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") "
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.367252 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kmtz9\" (UniqueName: \"kubernetes.io/projected/423606aa-ec3f-4223-a607-b88f5c132e91-kube-api-access-kmtz9\") pod \"423606aa-ec3f-4223-a607-b88f5c132e91\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") "
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.367301 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-nova-metadata-tls-certs\") pod \"423606aa-ec3f-4223-a607-b88f5c132e91\" (UID: \"423606aa-ec3f-4223-a607-b88f5c132e91\") "
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.368704 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/423606aa-ec3f-4223-a607-b88f5c132e91-logs" (OuterVolumeSpecName: "logs") pod "423606aa-ec3f-4223-a607-b88f5c132e91" (UID: "423606aa-ec3f-4223-a607-b88f5c132e91"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.373095 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/423606aa-ec3f-4223-a607-b88f5c132e91-kube-api-access-kmtz9" (OuterVolumeSpecName: "kube-api-access-kmtz9") pod "423606aa-ec3f-4223-a607-b88f5c132e91" (UID: "423606aa-ec3f-4223-a607-b88f5c132e91"). InnerVolumeSpecName "kube-api-access-kmtz9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.408842 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "423606aa-ec3f-4223-a607-b88f5c132e91" (UID: "423606aa-ec3f-4223-a607-b88f5c132e91"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.411572 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-config-data" (OuterVolumeSpecName: "config-data") pod "423606aa-ec3f-4223-a607-b88f5c132e91" (UID: "423606aa-ec3f-4223-a607-b88f5c132e91"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.423214 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "423606aa-ec3f-4223-a607-b88f5c132e91" (UID: "423606aa-ec3f-4223-a607-b88f5c132e91"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.469056 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kmtz9\" (UniqueName: \"kubernetes.io/projected/423606aa-ec3f-4223-a607-b88f5c132e91-kube-api-access-kmtz9\") on node \"crc\" DevicePath \"\""
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.469085 4799 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.469095 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.469103 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/423606aa-ec3f-4223-a607-b88f5c132e91-config-data\") on node \"crc\" DevicePath \"\""
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.469116 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/423606aa-ec3f-4223-a607-b88f5c132e91-logs\") on node \"crc\" DevicePath \"\""
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.625101 4799 generic.go:334] "Generic (PLEG): container finished" podID="423606aa-ec3f-4223-a607-b88f5c132e91" containerID="21d712afc7e7f095d223324a51720ecff07a038f5c5035d421357a77be5de346" exitCode=0
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.625165 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.625160 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"423606aa-ec3f-4223-a607-b88f5c132e91","Type":"ContainerDied","Data":"21d712afc7e7f095d223324a51720ecff07a038f5c5035d421357a77be5de346"}
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.625206 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"423606aa-ec3f-4223-a607-b88f5c132e91","Type":"ContainerDied","Data":"00e8386cb70c01f86d9592c27a2af76060b7b7e1fc72bede6828b48bd4c00fb7"}
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.625225 4799 scope.go:117] "RemoveContainer" containerID="21d712afc7e7f095d223324a51720ecff07a038f5c5035d421357a77be5de346"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.655316 4799 scope.go:117] "RemoveContainer" containerID="29abfd725d357b561e6ed4a0cf9fef9ec7f9ba0f12e6e33a57293256762e2381"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.665416 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.680924 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.681940 4799 scope.go:117] "RemoveContainer" containerID="21d712afc7e7f095d223324a51720ecff07a038f5c5035d421357a77be5de346"
Jan 21 17:55:31 crc kubenswrapper[4799]: E0121 17:55:31.682463 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21d712afc7e7f095d223324a51720ecff07a038f5c5035d421357a77be5de346\": container with ID starting with 21d712afc7e7f095d223324a51720ecff07a038f5c5035d421357a77be5de346 not found: ID does not exist" containerID="21d712afc7e7f095d223324a51720ecff07a038f5c5035d421357a77be5de346"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.682548 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21d712afc7e7f095d223324a51720ecff07a038f5c5035d421357a77be5de346"} err="failed to get container status \"21d712afc7e7f095d223324a51720ecff07a038f5c5035d421357a77be5de346\": rpc error: code = NotFound desc = could not find container \"21d712afc7e7f095d223324a51720ecff07a038f5c5035d421357a77be5de346\": container with ID starting with 21d712afc7e7f095d223324a51720ecff07a038f5c5035d421357a77be5de346 not found: ID does not exist"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.682594 4799 scope.go:117] "RemoveContainer" containerID="29abfd725d357b561e6ed4a0cf9fef9ec7f9ba0f12e6e33a57293256762e2381"
Jan 21 17:55:31 crc kubenswrapper[4799]: E0121 17:55:31.683043 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29abfd725d357b561e6ed4a0cf9fef9ec7f9ba0f12e6e33a57293256762e2381\": container with ID starting with 29abfd725d357b561e6ed4a0cf9fef9ec7f9ba0f12e6e33a57293256762e2381 not found: ID does not exist" containerID="29abfd725d357b561e6ed4a0cf9fef9ec7f9ba0f12e6e33a57293256762e2381"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.683100 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29abfd725d357b561e6ed4a0cf9fef9ec7f9ba0f12e6e33a57293256762e2381"} err="failed to get container status \"29abfd725d357b561e6ed4a0cf9fef9ec7f9ba0f12e6e33a57293256762e2381\": rpc error: code = NotFound desc = could not find container \"29abfd725d357b561e6ed4a0cf9fef9ec7f9ba0f12e6e33a57293256762e2381\": container with ID starting with 29abfd725d357b561e6ed4a0cf9fef9ec7f9ba0f12e6e33a57293256762e2381 not found: ID does not exist"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.698304 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Jan 21 17:55:31 crc kubenswrapper[4799]: E0121 17:55:31.698808 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbe752ec-bf40-44cc-b1df-c3d7e2acc01c" containerName="dnsmasq-dns"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.698827 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbe752ec-bf40-44cc-b1df-c3d7e2acc01c" containerName="dnsmasq-dns"
Jan 21 17:55:31 crc kubenswrapper[4799]: E0121 17:55:31.698838 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="423606aa-ec3f-4223-a607-b88f5c132e91" containerName="nova-metadata-metadata"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.698845 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="423606aa-ec3f-4223-a607-b88f5c132e91" containerName="nova-metadata-metadata"
Jan 21 17:55:31 crc kubenswrapper[4799]: E0121 17:55:31.698860 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1faad361-9dfd-4168-aaa4-626082473a62" containerName="nova-manage"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.698867 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="1faad361-9dfd-4168-aaa4-626082473a62" containerName="nova-manage"
Jan 21 17:55:31 crc kubenswrapper[4799]: E0121 17:55:31.698900 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbe752ec-bf40-44cc-b1df-c3d7e2acc01c" containerName="init"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.698909 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbe752ec-bf40-44cc-b1df-c3d7e2acc01c" containerName="init"
Jan 21 17:55:31 crc kubenswrapper[4799]: E0121 17:55:31.698919 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="423606aa-ec3f-4223-a607-b88f5c132e91" containerName="nova-metadata-log"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.698927 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="423606aa-ec3f-4223-a607-b88f5c132e91" containerName="nova-metadata-log"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.699199 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="423606aa-ec3f-4223-a607-b88f5c132e91" containerName="nova-metadata-metadata"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.699229 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="1faad361-9dfd-4168-aaa4-626082473a62" containerName="nova-manage"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.699242 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbe752ec-bf40-44cc-b1df-c3d7e2acc01c" containerName="dnsmasq-dns"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.699271 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="423606aa-ec3f-4223-a607-b88f5c132e91" containerName="nova-metadata-log"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.700733 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.704243 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.706094 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.709166 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.775229 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htqrb\" (UniqueName: \"kubernetes.io/projected/d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd-kube-api-access-htqrb\") pod \"nova-metadata-0\" (UID: \"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd\") " pod="openstack/nova-metadata-0"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.775310 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd-logs\") pod \"nova-metadata-0\" (UID: \"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd\") " pod="openstack/nova-metadata-0"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.775428 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd-config-data\") pod \"nova-metadata-0\" (UID: \"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd\") " pod="openstack/nova-metadata-0"
Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.775478 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd\") " pod="openstack/nova-metadata-0"
pod="openstack/nova-metadata-0" Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.775508 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd\") " pod="openstack/nova-metadata-0" Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.877212 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd\") " pod="openstack/nova-metadata-0" Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.877288 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd\") " pod="openstack/nova-metadata-0" Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.877358 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htqrb\" (UniqueName: \"kubernetes.io/projected/d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd-kube-api-access-htqrb\") pod \"nova-metadata-0\" (UID: \"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd\") " pod="openstack/nova-metadata-0" Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.877408 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd-logs\") pod \"nova-metadata-0\" (UID: \"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd\") " pod="openstack/nova-metadata-0" Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.877598 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd-config-data\") pod \"nova-metadata-0\" (UID: \"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd\") " pod="openstack/nova-metadata-0" Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.878030 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd-logs\") pod \"nova-metadata-0\" (UID: \"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd\") " pod="openstack/nova-metadata-0" Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.881710 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd-config-data\") pod \"nova-metadata-0\" (UID: \"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd\") " pod="openstack/nova-metadata-0" Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.888810 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd\") " pod="openstack/nova-metadata-0" Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.890320 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd-nova-metadata-tls-certs\") pod 
\"nova-metadata-0\" (UID: \"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd\") " pod="openstack/nova-metadata-0" Jan 21 17:55:31 crc kubenswrapper[4799]: I0121 17:55:31.898579 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htqrb\" (UniqueName: \"kubernetes.io/projected/d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd-kube-api-access-htqrb\") pod \"nova-metadata-0\" (UID: \"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd\") " pod="openstack/nova-metadata-0" Jan 21 17:55:32 crc kubenswrapper[4799]: I0121 17:55:32.024097 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 21 17:55:32 crc kubenswrapper[4799]: I0121 17:55:32.219534 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="423606aa-ec3f-4223-a607-b88f5c132e91" path="/var/lib/kubelet/pods/423606aa-ec3f-4223-a607-b88f5c132e91/volumes" Jan 21 17:55:32 crc kubenswrapper[4799]: I0121 17:55:32.509891 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 21 17:55:32 crc kubenswrapper[4799]: I0121 17:55:32.644631 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd","Type":"ContainerStarted","Data":"3d600bf2440220fcc9517ab29ee2f4a7257c4e8bcb412c969de8c99e189987a8"} Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.228588 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.302873 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-public-tls-certs\") pod \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.303231 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t59fr\" (UniqueName: \"kubernetes.io/projected/857bdfa7-ef8d-407d-ad68-3d401fad4c43-kube-api-access-t59fr\") pod \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.303489 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-combined-ca-bundle\") pod \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.304078 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-internal-tls-certs\") pod \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.304222 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-config-data\") pod \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.304320 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/857bdfa7-ef8d-407d-ad68-3d401fad4c43-logs\") pod 
\"857bdfa7-ef8d-407d-ad68-3d401fad4c43\" (UID: \"857bdfa7-ef8d-407d-ad68-3d401fad4c43\") " Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.308393 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/857bdfa7-ef8d-407d-ad68-3d401fad4c43-logs" (OuterVolumeSpecName: "logs") pod "857bdfa7-ef8d-407d-ad68-3d401fad4c43" (UID: "857bdfa7-ef8d-407d-ad68-3d401fad4c43"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.311394 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/857bdfa7-ef8d-407d-ad68-3d401fad4c43-kube-api-access-t59fr" (OuterVolumeSpecName: "kube-api-access-t59fr") pod "857bdfa7-ef8d-407d-ad68-3d401fad4c43" (UID: "857bdfa7-ef8d-407d-ad68-3d401fad4c43"). InnerVolumeSpecName "kube-api-access-t59fr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.341118 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-config-data" (OuterVolumeSpecName: "config-data") pod "857bdfa7-ef8d-407d-ad68-3d401fad4c43" (UID: "857bdfa7-ef8d-407d-ad68-3d401fad4c43"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.348477 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "857bdfa7-ef8d-407d-ad68-3d401fad4c43" (UID: "857bdfa7-ef8d-407d-ad68-3d401fad4c43"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.364332 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "857bdfa7-ef8d-407d-ad68-3d401fad4c43" (UID: "857bdfa7-ef8d-407d-ad68-3d401fad4c43"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.371384 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "857bdfa7-ef8d-407d-ad68-3d401fad4c43" (UID: "857bdfa7-ef8d-407d-ad68-3d401fad4c43"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.406942 4799 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.406976 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t59fr\" (UniqueName: \"kubernetes.io/projected/857bdfa7-ef8d-407d-ad68-3d401fad4c43-kube-api-access-t59fr\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.406988 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.406997 4799 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.407008 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857bdfa7-ef8d-407d-ad68-3d401fad4c43-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.407016 4799 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/857bdfa7-ef8d-407d-ad68-3d401fad4c43-logs\") on node \"crc\" DevicePath \"\"" Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.658368 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd","Type":"ContainerStarted","Data":"0658aa4e1a07207f3c0154f230c4c88a7df22324ce34815fd89a4b20be240d5d"} Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.658423 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd","Type":"ContainerStarted","Data":"16fa70b7edf10b043c7218c0303a774d13360222d1422b00dbc70caed83cb5c0"} Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.662401 4799 generic.go:334] "Generic (PLEG): container finished" podID="857bdfa7-ef8d-407d-ad68-3d401fad4c43" containerID="aae77d3910f9679fb9cfe0f6bc1a13611d0f2f25169a578bfebd7d306dcf34f6" exitCode=0 Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.662443 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"857bdfa7-ef8d-407d-ad68-3d401fad4c43","Type":"ContainerDied","Data":"aae77d3910f9679fb9cfe0f6bc1a13611d0f2f25169a578bfebd7d306dcf34f6"} Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.662467 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"857bdfa7-ef8d-407d-ad68-3d401fad4c43","Type":"ContainerDied","Data":"f5eff417197244cdb084b04482a5a56fe3a8efa29d677456826c56de9b7f8007"} Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.662488 4799 scope.go:117] "RemoveContainer" containerID="aae77d3910f9679fb9cfe0f6bc1a13611d0f2f25169a578bfebd7d306dcf34f6" Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.662623 4799 util.go:48] "No ready sandbox for pod can be found. 
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.708956 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.708929821 podStartE2EDuration="2.708929821s" podCreationTimestamp="2026-01-21 17:55:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:55:33.693602011 +0000 UTC m=+1360.319892044" watchObservedRunningTime="2026-01-21 17:55:33.708929821 +0000 UTC m=+1360.335219854"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.729620 4799 scope.go:117] "RemoveContainer" containerID="88b841e5f3b7df4a90393a9457991f4f0ee992486d88a2944fd6761f9bd4a5af"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.741311 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.779847 4799 scope.go:117] "RemoveContainer" containerID="aae77d3910f9679fb9cfe0f6bc1a13611d0f2f25169a578bfebd7d306dcf34f6"
Jan 21 17:55:33 crc kubenswrapper[4799]: E0121 17:55:33.780364 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aae77d3910f9679fb9cfe0f6bc1a13611d0f2f25169a578bfebd7d306dcf34f6\": container with ID starting with aae77d3910f9679fb9cfe0f6bc1a13611d0f2f25169a578bfebd7d306dcf34f6 not found: ID does not exist" containerID="aae77d3910f9679fb9cfe0f6bc1a13611d0f2f25169a578bfebd7d306dcf34f6"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.780405 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aae77d3910f9679fb9cfe0f6bc1a13611d0f2f25169a578bfebd7d306dcf34f6"} err="failed to get container status \"aae77d3910f9679fb9cfe0f6bc1a13611d0f2f25169a578bfebd7d306dcf34f6\": rpc error: code = NotFound desc = could not find container \"aae77d3910f9679fb9cfe0f6bc1a13611d0f2f25169a578bfebd7d306dcf34f6\": container with ID starting with aae77d3910f9679fb9cfe0f6bc1a13611d0f2f25169a578bfebd7d306dcf34f6 not found: ID does not exist"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.780435 4799 scope.go:117] "RemoveContainer" containerID="88b841e5f3b7df4a90393a9457991f4f0ee992486d88a2944fd6761f9bd4a5af"
Jan 21 17:55:33 crc kubenswrapper[4799]: E0121 17:55:33.780727 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88b841e5f3b7df4a90393a9457991f4f0ee992486d88a2944fd6761f9bd4a5af\": container with ID starting with 88b841e5f3b7df4a90393a9457991f4f0ee992486d88a2944fd6761f9bd4a5af not found: ID does not exist" containerID="88b841e5f3b7df4a90393a9457991f4f0ee992486d88a2944fd6761f9bd4a5af"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.780755 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88b841e5f3b7df4a90393a9457991f4f0ee992486d88a2944fd6761f9bd4a5af"} err="failed to get container status \"88b841e5f3b7df4a90393a9457991f4f0ee992486d88a2944fd6761f9bd4a5af\": rpc error: code = NotFound desc = could not find container \"88b841e5f3b7df4a90393a9457991f4f0ee992486d88a2944fd6761f9bd4a5af\": container with ID starting with 88b841e5f3b7df4a90393a9457991f4f0ee992486d88a2944fd6761f9bd4a5af not found: ID does not exist"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.783649 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.806418 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Jan 21 17:55:33 crc kubenswrapper[4799]: E0121 17:55:33.807114 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="857bdfa7-ef8d-407d-ad68-3d401fad4c43" containerName="nova-api-log"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.807152 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="857bdfa7-ef8d-407d-ad68-3d401fad4c43" containerName="nova-api-log"
Jan 21 17:55:33 crc kubenswrapper[4799]: E0121 17:55:33.807173 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="857bdfa7-ef8d-407d-ad68-3d401fad4c43" containerName="nova-api-api"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.807182 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="857bdfa7-ef8d-407d-ad68-3d401fad4c43" containerName="nova-api-api"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.807471 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="857bdfa7-ef8d-407d-ad68-3d401fad4c43" containerName="nova-api-api"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.807502 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="857bdfa7-ef8d-407d-ad68-3d401fad4c43" containerName="nova-api-log"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.809092 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.811893 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.818059 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.818323 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.824309 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.919222 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d954dc98-6a6a-49b8-976c-b668619adcff-logs\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.919372 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d954dc98-6a6a-49b8-976c-b668619adcff-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.919418 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4hcz\" (UniqueName: \"kubernetes.io/projected/d954dc98-6a6a-49b8-976c-b668619adcff-kube-api-access-r4hcz\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.919459 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d954dc98-6a6a-49b8-976c-b668619adcff-public-tls-certs\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.919647 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d954dc98-6a6a-49b8-976c-b668619adcff-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0"
Jan 21 17:55:33 crc kubenswrapper[4799]: I0121 17:55:33.919693 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d954dc98-6a6a-49b8-976c-b668619adcff-config-data\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0"
Jan 21 17:55:34 crc kubenswrapper[4799]: I0121 17:55:34.022026 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d954dc98-6a6a-49b8-976c-b668619adcff-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0"
Jan 21 17:55:34 crc kubenswrapper[4799]: I0121 17:55:34.022084 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d954dc98-6a6a-49b8-976c-b668619adcff-config-data\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0"
Jan 21 17:55:34 crc kubenswrapper[4799]: I0121 17:55:34.022208 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d954dc98-6a6a-49b8-976c-b668619adcff-logs\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0"
Jan 21 17:55:34 crc kubenswrapper[4799]: I0121 17:55:34.022253 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d954dc98-6a6a-49b8-976c-b668619adcff-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0"
Jan 21 17:55:34 crc kubenswrapper[4799]: I0121 17:55:34.022272 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4hcz\" (UniqueName: \"kubernetes.io/projected/d954dc98-6a6a-49b8-976c-b668619adcff-kube-api-access-r4hcz\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0"
Jan 21 17:55:34 crc kubenswrapper[4799]: I0121 17:55:34.022294 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d954dc98-6a6a-49b8-976c-b668619adcff-public-tls-certs\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0"
Jan 21 17:55:34 crc kubenswrapper[4799]: I0121 17:55:34.022987 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d954dc98-6a6a-49b8-976c-b668619adcff-logs\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0"
Jan 21 17:55:34 crc kubenswrapper[4799]: I0121 17:55:34.026842 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d954dc98-6a6a-49b8-976c-b668619adcff-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0"
\"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0" Jan 21 17:55:34 crc kubenswrapper[4799]: I0121 17:55:34.026933 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d954dc98-6a6a-49b8-976c-b668619adcff-public-tls-certs\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0" Jan 21 17:55:34 crc kubenswrapper[4799]: I0121 17:55:34.027064 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d954dc98-6a6a-49b8-976c-b668619adcff-config-data\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0" Jan 21 17:55:34 crc kubenswrapper[4799]: I0121 17:55:34.029391 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d954dc98-6a6a-49b8-976c-b668619adcff-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0" Jan 21 17:55:34 crc kubenswrapper[4799]: I0121 17:55:34.047050 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4hcz\" (UniqueName: \"kubernetes.io/projected/d954dc98-6a6a-49b8-976c-b668619adcff-kube-api-access-r4hcz\") pod \"nova-api-0\" (UID: \"d954dc98-6a6a-49b8-976c-b668619adcff\") " pod="openstack/nova-api-0" Jan 21 17:55:34 crc kubenswrapper[4799]: I0121 17:55:34.131769 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 21 17:55:34 crc kubenswrapper[4799]: I0121 17:55:34.224687 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="857bdfa7-ef8d-407d-ad68-3d401fad4c43" path="/var/lib/kubelet/pods/857bdfa7-ef8d-407d-ad68-3d401fad4c43/volumes" Jan 21 17:55:34 crc kubenswrapper[4799]: I0121 17:55:34.673601 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 21 17:55:35 crc kubenswrapper[4799]: E0121 17:55:35.625451 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 068bf8bb281f75e4be692955ecab11348760b6f46ff626802e22f1a182ec48c5 is running failed: container process not found" containerID="068bf8bb281f75e4be692955ecab11348760b6f46ff626802e22f1a182ec48c5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 21 17:55:35 crc kubenswrapper[4799]: E0121 17:55:35.626469 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 068bf8bb281f75e4be692955ecab11348760b6f46ff626802e22f1a182ec48c5 is running failed: container process not found" containerID="068bf8bb281f75e4be692955ecab11348760b6f46ff626802e22f1a182ec48c5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 21 17:55:35 crc kubenswrapper[4799]: E0121 17:55:35.627427 4799 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 068bf8bb281f75e4be692955ecab11348760b6f46ff626802e22f1a182ec48c5 is running failed: container process not found" containerID="068bf8bb281f75e4be692955ecab11348760b6f46ff626802e22f1a182ec48c5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 21 17:55:35 crc kubenswrapper[4799]: E0121 17:55:35.627469 4799 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container 
Jan 21 17:55:35 crc kubenswrapper[4799]: E0121 17:55:35.627469 4799 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 068bf8bb281f75e4be692955ecab11348760b6f46ff626802e22f1a182ec48c5 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="d6cb4b02-7469-4b56-9bc8-ae205587439c" containerName="nova-scheduler-scheduler"
Jan 21 17:55:35 crc kubenswrapper[4799]: I0121 17:55:35.711808 4799 generic.go:334] "Generic (PLEG): container finished" podID="d6cb4b02-7469-4b56-9bc8-ae205587439c" containerID="068bf8bb281f75e4be692955ecab11348760b6f46ff626802e22f1a182ec48c5" exitCode=0
Jan 21 17:55:35 crc kubenswrapper[4799]: I0121 17:55:35.711882 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d6cb4b02-7469-4b56-9bc8-ae205587439c","Type":"ContainerDied","Data":"068bf8bb281f75e4be692955ecab11348760b6f46ff626802e22f1a182ec48c5"}
Jan 21 17:55:35 crc kubenswrapper[4799]: I0121 17:55:35.722170 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d954dc98-6a6a-49b8-976c-b668619adcff","Type":"ContainerStarted","Data":"579f34ea27ba5e08ffce45ad1850432855bc2febee7b36abe4a081d7b456be63"}
Jan 21 17:55:35 crc kubenswrapper[4799]: I0121 17:55:35.722231 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d954dc98-6a6a-49b8-976c-b668619adcff","Type":"ContainerStarted","Data":"5eb04a1bfbcff04002d2a7f32a6465fcdb0840a395b0e2f451f0d69f8e8518b1"}
Jan 21 17:55:35 crc kubenswrapper[4799]: I0121 17:55:35.722245 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d954dc98-6a6a-49b8-976c-b668619adcff","Type":"ContainerStarted","Data":"17b173832a119c7bd4385e460d62a464caa8f4e7ca018dc2b8f041f2eeb159bd"}
Jan 21 17:55:35 crc kubenswrapper[4799]: I0121 17:55:35.748063 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.748040921 podStartE2EDuration="2.748040921s" podCreationTimestamp="2026-01-21 17:55:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:55:35.737201937 +0000 UTC m=+1362.363491960" watchObservedRunningTime="2026-01-21 17:55:35.748040921 +0000 UTC m=+1362.374330944"
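[annotation] The pod_startup_latency_tracker line above reports podStartSLOduration=2.748040921 for nova-api-0: with no image pulls involved (both pull timestamps are the zero time), this is simply watchObservedRunningTime minus podCreationTimestamp. A quick check of that arithmetic; a sketch only, with the timestamp layout assumed from the log's Go time rendering.

package main

import (
	"fmt"
	"time"
)

func main() {
	// Go's default time.String() layout, which the log fields use.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, _ := time.Parse(layout, "2026-01-21 17:55:33 +0000 UTC")
	observed, _ := time.Parse(layout, "2026-01-21 17:55:35.748040921 +0000 UTC")
	// Prints 2.748040921s, matching podStartSLOduration in the entry above.
	fmt.Println(observed.Sub(created))
}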
Jan 21 17:55:35 crc kubenswrapper[4799]: I0121 17:55:35.815873 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 21 17:55:35 crc kubenswrapper[4799]: I0121 17:55:35.886601 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6cb4b02-7469-4b56-9bc8-ae205587439c-config-data\") pod \"d6cb4b02-7469-4b56-9bc8-ae205587439c\" (UID: \"d6cb4b02-7469-4b56-9bc8-ae205587439c\") "
Jan 21 17:55:35 crc kubenswrapper[4799]: I0121 17:55:35.886698 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6cb4b02-7469-4b56-9bc8-ae205587439c-combined-ca-bundle\") pod \"d6cb4b02-7469-4b56-9bc8-ae205587439c\" (UID: \"d6cb4b02-7469-4b56-9bc8-ae205587439c\") "
Jan 21 17:55:35 crc kubenswrapper[4799]: I0121 17:55:35.886859 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cz27f\" (UniqueName: \"kubernetes.io/projected/d6cb4b02-7469-4b56-9bc8-ae205587439c-kube-api-access-cz27f\") pod \"d6cb4b02-7469-4b56-9bc8-ae205587439c\" (UID: \"d6cb4b02-7469-4b56-9bc8-ae205587439c\") "
Jan 21 17:55:35 crc kubenswrapper[4799]: I0121 17:55:35.894553 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6cb4b02-7469-4b56-9bc8-ae205587439c-kube-api-access-cz27f" (OuterVolumeSpecName: "kube-api-access-cz27f") pod "d6cb4b02-7469-4b56-9bc8-ae205587439c" (UID: "d6cb4b02-7469-4b56-9bc8-ae205587439c"). InnerVolumeSpecName "kube-api-access-cz27f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 17:55:35 crc kubenswrapper[4799]: I0121 17:55:35.917247 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6cb4b02-7469-4b56-9bc8-ae205587439c-config-data" (OuterVolumeSpecName: "config-data") pod "d6cb4b02-7469-4b56-9bc8-ae205587439c" (UID: "d6cb4b02-7469-4b56-9bc8-ae205587439c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:55:35 crc kubenswrapper[4799]: I0121 17:55:35.919255 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6cb4b02-7469-4b56-9bc8-ae205587439c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d6cb4b02-7469-4b56-9bc8-ae205587439c" (UID: "d6cb4b02-7469-4b56-9bc8-ae205587439c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 17:55:35 crc kubenswrapper[4799]: I0121 17:55:35.989637 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cz27f\" (UniqueName: \"kubernetes.io/projected/d6cb4b02-7469-4b56-9bc8-ae205587439c-kube-api-access-cz27f\") on node \"crc\" DevicePath \"\""
Jan 21 17:55:35 crc kubenswrapper[4799]: I0121 17:55:35.989678 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6cb4b02-7469-4b56-9bc8-ae205587439c-config-data\") on node \"crc\" DevicePath \"\""
Jan 21 17:55:35 crc kubenswrapper[4799]: I0121 17:55:35.989694 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6cb4b02-7469-4b56-9bc8-ae205587439c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 21 17:55:36 crc kubenswrapper[4799]: I0121 17:55:36.733619 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 21 17:55:36 crc kubenswrapper[4799]: I0121 17:55:36.733618 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d6cb4b02-7469-4b56-9bc8-ae205587439c","Type":"ContainerDied","Data":"401fa791bf8bfa3dab2c912b13713602df79f36e2fbc6d093f572fd96d1759d0"}
Jan 21 17:55:36 crc kubenswrapper[4799]: I0121 17:55:36.734028 4799 scope.go:117] "RemoveContainer" containerID="068bf8bb281f75e4be692955ecab11348760b6f46ff626802e22f1a182ec48c5"
Jan 21 17:55:36 crc kubenswrapper[4799]: I0121 17:55:36.765869 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 21 17:55:36 crc kubenswrapper[4799]: I0121 17:55:36.789571 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 21 17:55:36 crc kubenswrapper[4799]: I0121 17:55:36.827684 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Jan 21 17:55:36 crc kubenswrapper[4799]: E0121 17:55:36.828677 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6cb4b02-7469-4b56-9bc8-ae205587439c" containerName="nova-scheduler-scheduler"
Jan 21 17:55:36 crc kubenswrapper[4799]: I0121 17:55:36.828724 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6cb4b02-7469-4b56-9bc8-ae205587439c" containerName="nova-scheduler-scheduler"
Jan 21 17:55:36 crc kubenswrapper[4799]: I0121 17:55:36.829152 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6cb4b02-7469-4b56-9bc8-ae205587439c" containerName="nova-scheduler-scheduler"
Jan 21 17:55:36 crc kubenswrapper[4799]: I0121 17:55:36.830632 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 21 17:55:36 crc kubenswrapper[4799]: I0121 17:55:36.833248 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Jan 21 17:55:36 crc kubenswrapper[4799]: I0121 17:55:36.840381 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 21 17:55:36 crc kubenswrapper[4799]: I0121 17:55:36.908923 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/113d1aee-0a9f-47dd-9a33-ab951cab8535-config-data\") pod \"nova-scheduler-0\" (UID: \"113d1aee-0a9f-47dd-9a33-ab951cab8535\") " pod="openstack/nova-scheduler-0"
Jan 21 17:55:36 crc kubenswrapper[4799]: I0121 17:55:36.909079 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwz9r\" (UniqueName: \"kubernetes.io/projected/113d1aee-0a9f-47dd-9a33-ab951cab8535-kube-api-access-wwz9r\") pod \"nova-scheduler-0\" (UID: \"113d1aee-0a9f-47dd-9a33-ab951cab8535\") " pod="openstack/nova-scheduler-0"
Jan 21 17:55:36 crc kubenswrapper[4799]: I0121 17:55:36.909198 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/113d1aee-0a9f-47dd-9a33-ab951cab8535-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"113d1aee-0a9f-47dd-9a33-ab951cab8535\") " pod="openstack/nova-scheduler-0"
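[annotation] The cpu_manager / state_mem / memory_manager lines above show the kubelet purging resource-manager state for the deleted nova-scheduler pod (UID d6cb4b02...) before admitting its replacement of the same name with the new UID 113d1aee.... A minimal sketch of that bookkeeping idea, an in-memory assignment map keyed by pod UID and container name; illustrative only, and the real CPU manager stores cpusets rather than plain strings.

package main

import "fmt"

// containerKey identifies an assignment the way the log lines do:
// podUID plus containerName.
type containerKey struct {
	podUID, containerName string
}

type stateMemory struct {
	assignments map[containerKey]string // value stands in for a cpuset
}

// removeStaleState drops assignments for pods that are no longer active,
// mirroring the "RemoveStaleState: removing container" entries above.
func (s *stateMemory) removeStaleState(activeUIDs map[string]bool) {
	for key := range s.assignments {
		if !activeUIDs[key.podUID] {
			fmt.Printf("Deleted CPUSet assignment podUID=%q containerName=%q\n",
				key.podUID, key.containerName)
			delete(s.assignments, key)
		}
	}
}

func main() {
	s := &stateMemory{assignments: map[containerKey]string{
		{podUID: "d6cb4b02-7469-4b56-9bc8-ae205587439c", containerName: "nova-scheduler-scheduler"}: "0-3",
	}}
	// Only the replacement pod's UID is still active, so the old entry is purged.
	s.removeStaleState(map[string]bool{"113d1aee-0a9f-47dd-9a33-ab951cab8535": true})
}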
\"nova-scheduler-0\" (UID: \"113d1aee-0a9f-47dd-9a33-ab951cab8535\") " pod="openstack/nova-scheduler-0" Jan 21 17:55:37 crc kubenswrapper[4799]: I0121 17:55:37.011686 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/113d1aee-0a9f-47dd-9a33-ab951cab8535-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"113d1aee-0a9f-47dd-9a33-ab951cab8535\") " pod="openstack/nova-scheduler-0" Jan 21 17:55:37 crc kubenswrapper[4799]: I0121 17:55:37.011800 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/113d1aee-0a9f-47dd-9a33-ab951cab8535-config-data\") pod \"nova-scheduler-0\" (UID: \"113d1aee-0a9f-47dd-9a33-ab951cab8535\") " pod="openstack/nova-scheduler-0" Jan 21 17:55:37 crc kubenswrapper[4799]: I0121 17:55:37.016433 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/113d1aee-0a9f-47dd-9a33-ab951cab8535-config-data\") pod \"nova-scheduler-0\" (UID: \"113d1aee-0a9f-47dd-9a33-ab951cab8535\") " pod="openstack/nova-scheduler-0" Jan 21 17:55:37 crc kubenswrapper[4799]: I0121 17:55:37.017547 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/113d1aee-0a9f-47dd-9a33-ab951cab8535-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"113d1aee-0a9f-47dd-9a33-ab951cab8535\") " pod="openstack/nova-scheduler-0" Jan 21 17:55:37 crc kubenswrapper[4799]: I0121 17:55:37.025002 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 21 17:55:37 crc kubenswrapper[4799]: I0121 17:55:37.025072 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 21 17:55:37 crc kubenswrapper[4799]: I0121 17:55:37.033714 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwz9r\" (UniqueName: \"kubernetes.io/projected/113d1aee-0a9f-47dd-9a33-ab951cab8535-kube-api-access-wwz9r\") pod \"nova-scheduler-0\" (UID: \"113d1aee-0a9f-47dd-9a33-ab951cab8535\") " pod="openstack/nova-scheduler-0" Jan 21 17:55:37 crc kubenswrapper[4799]: I0121 17:55:37.154730 4799 util.go:30] "No sandbox for pod can be found. 
Jan 21 17:55:37 crc kubenswrapper[4799]: I0121 17:55:37.154730 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 21 17:55:37 crc kubenswrapper[4799]: I0121 17:55:37.686367 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 21 17:55:37 crc kubenswrapper[4799]: W0121 17:55:37.687888 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod113d1aee_0a9f_47dd_9a33_ab951cab8535.slice/crio-4ab0e02e85e2a2e84b334865ba79999790549826e7786ce7f08d9229d528b96d WatchSource:0}: Error finding container 4ab0e02e85e2a2e84b334865ba79999790549826e7786ce7f08d9229d528b96d: Status 404 returned error can't find the container with id 4ab0e02e85e2a2e84b334865ba79999790549826e7786ce7f08d9229d528b96d
Jan 21 17:55:37 crc kubenswrapper[4799]: I0121 17:55:37.758839 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"113d1aee-0a9f-47dd-9a33-ab951cab8535","Type":"ContainerStarted","Data":"4ab0e02e85e2a2e84b334865ba79999790549826e7786ce7f08d9229d528b96d"}
Jan 21 17:55:38 crc kubenswrapper[4799]: I0121 17:55:38.219845 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6cb4b02-7469-4b56-9bc8-ae205587439c" path="/var/lib/kubelet/pods/d6cb4b02-7469-4b56-9bc8-ae205587439c/volumes"
Jan 21 17:55:38 crc kubenswrapper[4799]: I0121 17:55:38.777312 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"113d1aee-0a9f-47dd-9a33-ab951cab8535","Type":"ContainerStarted","Data":"1569b95cd07b520d14f2e6aa9346e400e16866da8d36c56d1339d87a71a7d9ce"}
Jan 21 17:55:42 crc kubenswrapper[4799]: I0121 17:55:42.025222 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Jan 21 17:55:42 crc kubenswrapper[4799]: I0121 17:55:42.025802 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Jan 21 17:55:42 crc kubenswrapper[4799]: I0121 17:55:42.155861 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Jan 21 17:55:43 crc kubenswrapper[4799]: I0121 17:55:43.031580 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.221:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Jan 21 17:55:43 crc kubenswrapper[4799]: I0121 17:55:43.031642 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.221:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Jan 21 17:55:44 crc kubenswrapper[4799]: I0121 17:55:44.133120 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Jan 21 17:55:44 crc kubenswrapper[4799]: I0121 17:55:44.133206 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
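[annotation] The "Probe failed ... Client.Timeout exceeded while awaiting headers" entries above are startup probes timing out against https://10.217.0.221:8775/ while nova-metadata is still initializing; the probes later flip to "started" once the service answers. A sketch of the same kind of check, an HTTPS GET with a short client timeout; certificate verification is disabled here only because the probe targets a bare IP (an assumption of this sketch, not the kubelet's prober code).

package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{
		Timeout: 1 * time.Second, // probe-style deadline; slow startup surfaces as an error
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Get("https://10.217.0.221:8775/")
	if err != nil {
		// While the service is starting, this prints the same net/http timeout
		// wording seen in the log's output= field.
		fmt.Println("probe failure:", err)
		return
	}
	defer resp.Body.Close()
	// Kubelet HTTP probes treat any status in the 200-399 range as success.
	fmt.Println("probe result:", resp.Status)
}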
headers)" Jan 21 17:55:45 crc kubenswrapper[4799]: I0121 17:55:45.147311 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d954dc98-6a6a-49b8-976c-b668619adcff" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.222:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 21 17:55:46 crc kubenswrapper[4799]: I0121 17:55:46.888993 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 21 17:55:46 crc kubenswrapper[4799]: I0121 17:55:46.936524 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=10.936485578 podStartE2EDuration="10.936485578s" podCreationTimestamp="2026-01-21 17:55:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:55:38.809392877 +0000 UTC m=+1365.435682940" watchObservedRunningTime="2026-01-21 17:55:46.936485578 +0000 UTC m=+1373.562775641" Jan 21 17:55:47 crc kubenswrapper[4799]: I0121 17:55:47.157739 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 21 17:55:47 crc kubenswrapper[4799]: I0121 17:55:47.194724 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 21 17:55:47 crc kubenswrapper[4799]: I0121 17:55:47.908002 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 21 17:55:52 crc kubenswrapper[4799]: I0121 17:55:52.033449 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 21 17:55:52 crc kubenswrapper[4799]: I0121 17:55:52.035058 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 21 17:55:52 crc kubenswrapper[4799]: I0121 17:55:52.044032 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 21 17:55:52 crc kubenswrapper[4799]: I0121 17:55:52.932166 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 21 17:55:54 crc kubenswrapper[4799]: I0121 17:55:54.148968 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 21 17:55:54 crc kubenswrapper[4799]: I0121 17:55:54.150684 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 21 17:55:54 crc kubenswrapper[4799]: I0121 17:55:54.165161 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 21 17:55:54 crc kubenswrapper[4799]: I0121 17:55:54.165315 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 21 17:55:54 crc kubenswrapper[4799]: I0121 17:55:54.943608 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 21 17:55:54 crc kubenswrapper[4799]: I0121 17:55:54.956278 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 21 17:56:05 crc kubenswrapper[4799]: I0121 17:56:05.103280 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 21 17:56:06 crc kubenswrapper[4799]: I0121 17:56:06.089615 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/rabbitmq-cell1-server-0"] Jan 21 17:56:08 crc kubenswrapper[4799]: I0121 17:56:08.704385 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="48f0f966-0779-4959-884e-eae4ed66e969" containerName="rabbitmq" containerID="cri-o://0a1bba0cc2d36467280bd23ec59ec0a87b3c3d464346de4cff87e4c3a2018228" gracePeriod=604797 Jan 21 17:56:09 crc kubenswrapper[4799]: I0121 17:56:09.645383 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="03a5694f-1e8b-490e-be8f-dce31bdd83c3" containerName="rabbitmq" containerID="cri-o://e19b9f0e038c9ded9ea4b11681266954f4ea8cb749b3b051e6c9dbd2bb1f658d" gracePeriod=604797 Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.229050 4799 generic.go:334] "Generic (PLEG): container finished" podID="48f0f966-0779-4959-884e-eae4ed66e969" containerID="0a1bba0cc2d36467280bd23ec59ec0a87b3c3d464346de4cff87e4c3a2018228" exitCode=0 Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.239193 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"48f0f966-0779-4959-884e-eae4ed66e969","Type":"ContainerDied","Data":"0a1bba0cc2d36467280bd23ec59ec0a87b3c3d464346de4cff87e4c3a2018228"} Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.398725 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.526070 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-confd\") pod \"48f0f966-0779-4959-884e-eae4ed66e969\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.526141 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-tls\") pod \"48f0f966-0779-4959-884e-eae4ed66e969\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.526187 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-erlang-cookie\") pod \"48f0f966-0779-4959-884e-eae4ed66e969\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.526230 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-server-conf\") pod \"48f0f966-0779-4959-884e-eae4ed66e969\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.526295 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-config-data\") pod \"48f0f966-0779-4959-884e-eae4ed66e969\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.526345 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rckht\" (UniqueName: \"kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-kube-api-access-rckht\") pod 
\"48f0f966-0779-4959-884e-eae4ed66e969\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.526403 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-plugins-conf\") pod \"48f0f966-0779-4959-884e-eae4ed66e969\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.526453 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/48f0f966-0779-4959-884e-eae4ed66e969-erlang-cookie-secret\") pod \"48f0f966-0779-4959-884e-eae4ed66e969\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.526501 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"48f0f966-0779-4959-884e-eae4ed66e969\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.526713 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-plugins\") pod \"48f0f966-0779-4959-884e-eae4ed66e969\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.527311 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "48f0f966-0779-4959-884e-eae4ed66e969" (UID: "48f0f966-0779-4959-884e-eae4ed66e969"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.527431 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "48f0f966-0779-4959-884e-eae4ed66e969" (UID: "48f0f966-0779-4959-884e-eae4ed66e969"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.527708 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "48f0f966-0779-4959-884e-eae4ed66e969" (UID: "48f0f966-0779-4959-884e-eae4ed66e969"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.527736 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/48f0f966-0779-4959-884e-eae4ed66e969-pod-info\") pod \"48f0f966-0779-4959-884e-eae4ed66e969\" (UID: \"48f0f966-0779-4959-884e-eae4ed66e969\") " Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.528654 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.528672 4799 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.528683 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.548805 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/48f0f966-0779-4959-884e-eae4ed66e969-pod-info" (OuterVolumeSpecName: "pod-info") pod "48f0f966-0779-4959-884e-eae4ed66e969" (UID: "48f0f966-0779-4959-884e-eae4ed66e969"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.550382 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "persistence") pod "48f0f966-0779-4959-884e-eae4ed66e969" (UID: "48f0f966-0779-4959-884e-eae4ed66e969"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.550427 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-kube-api-access-rckht" (OuterVolumeSpecName: "kube-api-access-rckht") pod "48f0f966-0779-4959-884e-eae4ed66e969" (UID: "48f0f966-0779-4959-884e-eae4ed66e969"). InnerVolumeSpecName "kube-api-access-rckht". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.555519 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48f0f966-0779-4959-884e-eae4ed66e969-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "48f0f966-0779-4959-884e-eae4ed66e969" (UID: "48f0f966-0779-4959-884e-eae4ed66e969"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.657215 4799 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/48f0f966-0779-4959-884e-eae4ed66e969-pod-info\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.657268 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rckht\" (UniqueName: \"kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-kube-api-access-rckht\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.657286 4799 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/48f0f966-0779-4959-884e-eae4ed66e969-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.657319 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.658886 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-config-data" (OuterVolumeSpecName: "config-data") pod "48f0f966-0779-4959-884e-eae4ed66e969" (UID: "48f0f966-0779-4959-884e-eae4ed66e969"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.682326 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "48f0f966-0779-4959-884e-eae4ed66e969" (UID: "48f0f966-0779-4959-884e-eae4ed66e969"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.703064 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.736732 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-server-conf" (OuterVolumeSpecName: "server-conf") pod "48f0f966-0779-4959-884e-eae4ed66e969" (UID: "48f0f966-0779-4959-884e-eae4ed66e969"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.758874 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.758910 4799 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-server-conf\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.758924 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/48f0f966-0779-4959-884e-eae4ed66e969-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.758938 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.766063 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "48f0f966-0779-4959-884e-eae4ed66e969" (UID: "48f0f966-0779-4959-884e-eae4ed66e969"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:10 crc kubenswrapper[4799]: I0121 17:56:10.860474 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/48f0f966-0779-4959-884e-eae4ed66e969-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.241990 4799 generic.go:334] "Generic (PLEG): container finished" podID="03a5694f-1e8b-490e-be8f-dce31bdd83c3" containerID="e19b9f0e038c9ded9ea4b11681266954f4ea8cb749b3b051e6c9dbd2bb1f658d" exitCode=0 Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.242110 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"03a5694f-1e8b-490e-be8f-dce31bdd83c3","Type":"ContainerDied","Data":"e19b9f0e038c9ded9ea4b11681266954f4ea8cb749b3b051e6c9dbd2bb1f658d"} Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.242428 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"03a5694f-1e8b-490e-be8f-dce31bdd83c3","Type":"ContainerDied","Data":"a3a5c63d36782c1c68d50cb46db514f4b49f6ea52b039d826b2e8359f2c1d7d7"} Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.242463 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a3a5c63d36782c1c68d50cb46db514f4b49f6ea52b039d826b2e8359f2c1d7d7" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.245791 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"48f0f966-0779-4959-884e-eae4ed66e969","Type":"ContainerDied","Data":"766d0aa0cbba878c9cf0f4bfc16780f81630549cfa0224a60ee50b57f0b2f61f"} Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.245860 4799 scope.go:117] "RemoveContainer" containerID="0a1bba0cc2d36467280bd23ec59ec0a87b3c3d464346de4cff87e4c3a2018228" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.245873 4799 util.go:48] "No ready sandbox for pod can be found. 
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.245873 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.310512 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.328485 4799 scope.go:117] "RemoveContainer" containerID="c44dea80f4b6b10d56559fe49cb3b1af988bd74e190232574355f35b1495761d"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.333999 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.344461 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.380891 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 21 17:56:11 crc kubenswrapper[4799]: E0121 17:56:11.381855 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03a5694f-1e8b-490e-be8f-dce31bdd83c3" containerName="rabbitmq"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.381871 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="03a5694f-1e8b-490e-be8f-dce31bdd83c3" containerName="rabbitmq"
Jan 21 17:56:11 crc kubenswrapper[4799]: E0121 17:56:11.381893 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48f0f966-0779-4959-884e-eae4ed66e969" containerName="rabbitmq"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.381900 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="48f0f966-0779-4959-884e-eae4ed66e969" containerName="rabbitmq"
Jan 21 17:56:11 crc kubenswrapper[4799]: E0121 17:56:11.381926 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48f0f966-0779-4959-884e-eae4ed66e969" containerName="setup-container"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.381933 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="48f0f966-0779-4959-884e-eae4ed66e969" containerName="setup-container"
Jan 21 17:56:11 crc kubenswrapper[4799]: E0121 17:56:11.381964 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03a5694f-1e8b-490e-be8f-dce31bdd83c3" containerName="setup-container"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.381970 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="03a5694f-1e8b-490e-be8f-dce31bdd83c3" containerName="setup-container"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.382401 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="48f0f966-0779-4959-884e-eae4ed66e969" containerName="rabbitmq"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.382448 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="03a5694f-1e8b-490e-be8f-dce31bdd83c3" containerName="rabbitmq"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.394277 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.429603 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.430344 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-nwh8p"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.430553 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.430653 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.430820 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.430987 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.431506 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.481882 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46nw9\" (UniqueName: \"kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-kube-api-access-46nw9\") pod \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") "
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.481947 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-server-conf\") pod \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") "
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.482031 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/03a5694f-1e8b-490e-be8f-dce31bdd83c3-pod-info\") pod \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") "
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.482089 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-plugins\") pod \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") "
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.482111 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-plugins-conf\") pod \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") "
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.484575 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.485515 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-erlang-cookie\") pod \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") "
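[annotation] The reflector.go:368 burst above is the kubelet priming single-object watch caches for exactly the Secrets and ConfigMaps the incoming rabbitmq-server-0 pod references, so the mount operations that follow can read them locally instead of hitting the API server per mount. A toy version of such a keyed cache fed by watch-style updates; a sketch only, since the real reflector maintains a watch against the API server:

package main

import (
	"fmt"
	"sync"
)

// objectCache is a minimal stand-in for a reflector-backed store:
// watch events land in Update, mount operations read via Get.
type objectCache struct {
	mu    sync.RWMutex
	items map[string][]byte // key: "namespace/name"
}

func (c *objectCache) Update(key string, data []byte) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.items[key] = data
	fmt.Printf("Caches populated for %s\n", key)
}

func (c *objectCache) Get(key string) ([]byte, bool) {
	c.mu.RLock()
	defer c.mu.RUnlock()
	d, ok := c.items[key]
	return d, ok
}

func main() {
	c := &objectCache{items: map[string][]byte{}}
	c.Update("openstack/rabbitmq-server-conf", []byte("...config..."))
	if _, ok := c.Get("openstack/rabbitmq-server-conf"); ok {
		fmt.Println("volume contents can be rendered from the cached object")
	}
}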
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.485628 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/03a5694f-1e8b-490e-be8f-dce31bdd83c3-erlang-cookie-secret\") pod \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") "
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.485656 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-config-data\") pod \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") "
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.485688 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-confd\") pod \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") "
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.486053 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-tls\") pod \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") "
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.486179 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\" (UID: \"03a5694f-1e8b-490e-be8f-dce31bdd83c3\") "
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.486362 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "03a5694f-1e8b-490e-be8f-dce31bdd83c3" (UID: "03a5694f-1e8b-490e-be8f-dce31bdd83c3"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.486673 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "03a5694f-1e8b-490e-be8f-dce31bdd83c3" (UID: "03a5694f-1e8b-490e-be8f-dce31bdd83c3"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.487279 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.487308 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.529829 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/03a5694f-1e8b-490e-be8f-dce31bdd83c3-pod-info" (OuterVolumeSpecName: "pod-info") pod "03a5694f-1e8b-490e-be8f-dce31bdd83c3" (UID: "03a5694f-1e8b-490e-be8f-dce31bdd83c3"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.532281 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03a5694f-1e8b-490e-be8f-dce31bdd83c3-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "03a5694f-1e8b-490e-be8f-dce31bdd83c3" (UID: "03a5694f-1e8b-490e-be8f-dce31bdd83c3"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.534858 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-kube-api-access-46nw9" (OuterVolumeSpecName: "kube-api-access-46nw9") pod "03a5694f-1e8b-490e-be8f-dce31bdd83c3" (UID: "03a5694f-1e8b-490e-be8f-dce31bdd83c3"). InnerVolumeSpecName "kube-api-access-46nw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.546549 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "03a5694f-1e8b-490e-be8f-dce31bdd83c3" (UID: "03a5694f-1e8b-490e-be8f-dce31bdd83c3"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.553369 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "03a5694f-1e8b-490e-be8f-dce31bdd83c3" (UID: "03a5694f-1e8b-490e-be8f-dce31bdd83c3"). InnerVolumeSpecName "local-storage01-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.589672 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/135b6a22-006b-4270-a559-39fc323570b2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.589771 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm4hn\" (UniqueName: \"kubernetes.io/projected/135b6a22-006b-4270-a559-39fc323570b2-kube-api-access-jm4hn\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.589825 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/135b6a22-006b-4270-a559-39fc323570b2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.589875 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/135b6a22-006b-4270-a559-39fc323570b2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.589906 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/135b6a22-006b-4270-a559-39fc323570b2-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.589957 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/135b6a22-006b-4270-a559-39fc323570b2-config-data\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.590001 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/135b6a22-006b-4270-a559-39fc323570b2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.590036 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.590098 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/135b6a22-006b-4270-a559-39fc323570b2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: 
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.590255 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/135b6a22-006b-4270-a559-39fc323570b2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.590382 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/135b6a22-006b-4270-a559-39fc323570b2-server-conf\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.590458 4799 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/03a5694f-1e8b-490e-be8f-dce31bdd83c3-pod-info\") on node \"crc\" DevicePath \"\""
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.590479 4799 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-plugins-conf\") on node \"crc\" DevicePath \"\""
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.590492 4799 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/03a5694f-1e8b-490e-be8f-dce31bdd83c3-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.590503 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.590527 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" "
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.590733 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46nw9\" (UniqueName: \"kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-kube-api-access-46nw9\") on node \"crc\" DevicePath \"\""
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.714225 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.716631 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/135b6a22-006b-4270-a559-39fc323570b2-server-conf\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0"
Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.716713 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/135b6a22-006b-4270-a559-39fc323570b2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0"
\"kubernetes.io/projected/135b6a22-006b-4270-a559-39fc323570b2-kube-api-access-jm4hn\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.716801 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/135b6a22-006b-4270-a559-39fc323570b2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.716835 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/135b6a22-006b-4270-a559-39fc323570b2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.716853 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/135b6a22-006b-4270-a559-39fc323570b2-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.716891 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/135b6a22-006b-4270-a559-39fc323570b2-config-data\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.716922 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/135b6a22-006b-4270-a559-39fc323570b2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.716985 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/135b6a22-006b-4270-a559-39fc323570b2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.717015 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/135b6a22-006b-4270-a559-39fc323570b2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.717120 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.717515 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/135b6a22-006b-4270-a559-39fc323570b2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.718414 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" 
(UniqueName: \"kubernetes.io/configmap/135b6a22-006b-4270-a559-39fc323570b2-server-conf\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.723410 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/135b6a22-006b-4270-a559-39fc323570b2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.724807 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/135b6a22-006b-4270-a559-39fc323570b2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.726600 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/135b6a22-006b-4270-a559-39fc323570b2-config-data\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.732110 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/135b6a22-006b-4270-a559-39fc323570b2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.740788 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/135b6a22-006b-4270-a559-39fc323570b2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.742845 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/135b6a22-006b-4270-a559-39fc323570b2-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.744350 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/135b6a22-006b-4270-a559-39fc323570b2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.765666 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-config-data" (OuterVolumeSpecName: "config-data") pod "03a5694f-1e8b-490e-be8f-dce31bdd83c3" (UID: "03a5694f-1e8b-490e-be8f-dce31bdd83c3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.777019 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jm4hn\" (UniqueName: \"kubernetes.io/projected/135b6a22-006b-4270-a559-39fc323570b2-kube-api-access-jm4hn\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.836171 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.836255 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.836366 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/rabbitmq-server-0" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.946501 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-server-conf" (OuterVolumeSpecName: "server-conf") pod "03a5694f-1e8b-490e-be8f-dce31bdd83c3" (UID: "03a5694f-1e8b-490e-be8f-dce31bdd83c3"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:11 crc kubenswrapper[4799]: I0121 17:56:11.964218 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"135b6a22-006b-4270-a559-39fc323570b2\") " pod="openstack/rabbitmq-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.036309 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.040376 4799 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/03a5694f-1e8b-490e-be8f-dce31bdd83c3-server-conf\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.046335 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "03a5694f-1e8b-490e-be8f-dce31bdd83c3" (UID: "03a5694f-1e8b-490e-be8f-dce31bdd83c3"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.145732 4799 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/03a5694f-1e8b-490e-be8f-dce31bdd83c3-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.221738 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48f0f966-0779-4959-884e-eae4ed66e969" path="/var/lib/kubelet/pods/48f0f966-0779-4959-884e-eae4ed66e969/volumes" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.257421 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.301179 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.321760 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.332411 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.354432 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.356243 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.358896 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.359306 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.359481 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.359665 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.359818 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-c67js" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.359972 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.360343 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.384008 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.552737 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/88d1e166-bb2f-473e-a955-e79c6251a580-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.552849 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/88d1e166-bb2f-473e-a955-e79c6251a580-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.552990 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/88d1e166-bb2f-473e-a955-e79c6251a580-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.553019 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/88d1e166-bb2f-473e-a955-e79c6251a580-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.553057 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.553215 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/88d1e166-bb2f-473e-a955-e79c6251a580-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.553278 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/88d1e166-bb2f-473e-a955-e79c6251a580-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.553375 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg5dl\" (UniqueName: \"kubernetes.io/projected/88d1e166-bb2f-473e-a955-e79c6251a580-kube-api-access-tg5dl\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.553580 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/88d1e166-bb2f-473e-a955-e79c6251a580-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.553656 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/88d1e166-bb2f-473e-a955-e79c6251a580-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.553722 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/88d1e166-bb2f-473e-a955-e79c6251a580-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.656113 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/88d1e166-bb2f-473e-a955-e79c6251a580-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.656200 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/88d1e166-bb2f-473e-a955-e79c6251a580-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.656234 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/88d1e166-bb2f-473e-a955-e79c6251a580-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.656265 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/88d1e166-bb2f-473e-a955-e79c6251a580-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.656304 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/88d1e166-bb2f-473e-a955-e79c6251a580-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.656326 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/88d1e166-bb2f-473e-a955-e79c6251a580-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.656352 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/88d1e166-bb2f-473e-a955-e79c6251a580-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.656379 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.656409 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/88d1e166-bb2f-473e-a955-e79c6251a580-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.656434 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/88d1e166-bb2f-473e-a955-e79c6251a580-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.656449 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg5dl\" (UniqueName: \"kubernetes.io/projected/88d1e166-bb2f-473e-a955-e79c6251a580-kube-api-access-tg5dl\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.657232 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/88d1e166-bb2f-473e-a955-e79c6251a580-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.657466 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/88d1e166-bb2f-473e-a955-e79c6251a580-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.658083 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/88d1e166-bb2f-473e-a955-e79c6251a580-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.659044 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.659466 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/88d1e166-bb2f-473e-a955-e79c6251a580-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.660297 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/88d1e166-bb2f-473e-a955-e79c6251a580-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.662420 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/88d1e166-bb2f-473e-a955-e79c6251a580-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.662815 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/88d1e166-bb2f-473e-a955-e79c6251a580-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.663153 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/88d1e166-bb2f-473e-a955-e79c6251a580-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.664605 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/88d1e166-bb2f-473e-a955-e79c6251a580-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.679162 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg5dl\" (UniqueName: \"kubernetes.io/projected/88d1e166-bb2f-473e-a955-e79c6251a580-kube-api-access-tg5dl\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:12 crc kubenswrapper[4799]: I0121 17:56:12.705280 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"88d1e166-bb2f-473e-a955-e79c6251a580\") " pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:13 crc kubenswrapper[4799]: I0121 17:56:13.087944 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:13 crc kubenswrapper[4799]: I0121 17:56:13.273074 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"135b6a22-006b-4270-a559-39fc323570b2","Type":"ContainerStarted","Data":"48bc4abe8c92162f9903ec6275dd4edf226bce1ba6ff3d29eac4e6b02e5d3f6f"} Jan 21 17:56:13 crc kubenswrapper[4799]: I0121 17:56:13.611496 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 21 17:56:14 crc kubenswrapper[4799]: I0121 17:56:14.240830 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03a5694f-1e8b-490e-be8f-dce31bdd83c3" path="/var/lib/kubelet/pods/03a5694f-1e8b-490e-be8f-dce31bdd83c3/volumes" Jan 21 17:56:14 crc kubenswrapper[4799]: I0121 17:56:14.291412 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"88d1e166-bb2f-473e-a955-e79c6251a580","Type":"ContainerStarted","Data":"bc337bf9923cf46d411c74f2ce83b87a411cd72ac3b8480b1e142901a1f5db0d"} Jan 21 17:56:14 crc kubenswrapper[4799]: I0121 17:56:14.293256 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"135b6a22-006b-4270-a559-39fc323570b2","Type":"ContainerStarted","Data":"191abc08878003aecd4013471ed66904789fbc75e7e865367b1aa9ec7fdb72c6"} Jan 21 17:56:15 crc kubenswrapper[4799]: I0121 17:56:15.318367 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"88d1e166-bb2f-473e-a955-e79c6251a580","Type":"ContainerStarted","Data":"f8e1006a5e4eac397850d24cc58eeeda03a573d9a2216b34dee7a75e8a470365"} Jan 21 17:56:16 crc kubenswrapper[4799]: I0121 17:56:16.217937 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="03a5694f-1e8b-490e-be8f-dce31bdd83c3" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.107:5671: i/o timeout" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.519654 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6f64fd4b4c-frnzp"] Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.521769 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.524792 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.554840 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f64fd4b4c-frnzp"] Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.560280 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-ovsdbserver-nb\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.560342 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-config\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.560381 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-ovsdbserver-sb\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.560542 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-openstack-edpm-ipam\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.560588 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-dns-svc\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.560612 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j898t\" (UniqueName: \"kubernetes.io/projected/9efdc452-a321-4a25-adf5-67894f911570-kube-api-access-j898t\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.560648 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-dns-swift-storage-0\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.661368 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-openstack-edpm-ipam\") pod 
\"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.661437 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-dns-svc\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.661462 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j898t\" (UniqueName: \"kubernetes.io/projected/9efdc452-a321-4a25-adf5-67894f911570-kube-api-access-j898t\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.661491 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-dns-swift-storage-0\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.661518 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-ovsdbserver-nb\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.661545 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-config\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.661579 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-ovsdbserver-sb\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.662372 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-openstack-edpm-ipam\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.662553 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-dns-svc\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.662578 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-dns-swift-storage-0\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: 
\"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.662697 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-ovsdbserver-sb\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.662807 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-config\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.662901 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-ovsdbserver-nb\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.693589 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j898t\" (UniqueName: \"kubernetes.io/projected/9efdc452-a321-4a25-adf5-67894f911570-kube-api-access-j898t\") pod \"dnsmasq-dns-6f64fd4b4c-frnzp\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:21 crc kubenswrapper[4799]: I0121 17:56:21.843296 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:22 crc kubenswrapper[4799]: I0121 17:56:22.381473 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f64fd4b4c-frnzp"] Jan 21 17:56:23 crc kubenswrapper[4799]: I0121 17:56:23.412403 4799 generic.go:334] "Generic (PLEG): container finished" podID="9efdc452-a321-4a25-adf5-67894f911570" containerID="14af5a88af73f817ad12b3b833b891105364d321198c24bf8f5bd86fb1895db7" exitCode=0 Jan 21 17:56:23 crc kubenswrapper[4799]: I0121 17:56:23.412508 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" event={"ID":"9efdc452-a321-4a25-adf5-67894f911570","Type":"ContainerDied","Data":"14af5a88af73f817ad12b3b833b891105364d321198c24bf8f5bd86fb1895db7"} Jan 21 17:56:23 crc kubenswrapper[4799]: I0121 17:56:23.412706 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" event={"ID":"9efdc452-a321-4a25-adf5-67894f911570","Type":"ContainerStarted","Data":"6aa47fe06b06b1a72f42b14887f7c8e403d6a88a5d302a658d8a9055e8edbef7"} Jan 21 17:56:24 crc kubenswrapper[4799]: I0121 17:56:24.429107 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" event={"ID":"9efdc452-a321-4a25-adf5-67894f911570","Type":"ContainerStarted","Data":"e49fac59261f18102f99ad0b9b9cb519db5d9d27fda5b47d1827159a95c57bfa"} Jan 21 17:56:24 crc kubenswrapper[4799]: I0121 17:56:24.429483 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:24 crc kubenswrapper[4799]: I0121 17:56:24.452913 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" podStartSLOduration=3.452891674 
podStartE2EDuration="3.452891674s" podCreationTimestamp="2026-01-21 17:56:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:56:24.449104067 +0000 UTC m=+1411.075394120" watchObservedRunningTime="2026-01-21 17:56:24.452891674 +0000 UTC m=+1411.079181687" Jan 21 17:56:31 crc kubenswrapper[4799]: I0121 17:56:31.845445 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:31 crc kubenswrapper[4799]: I0121 17:56:31.941210 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5dbd69cdbc-vhw4k"] Jan 21 17:56:31 crc kubenswrapper[4799]: I0121 17:56:31.941711 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" podUID="82c5bc2f-a942-452f-9904-825ee865bee7" containerName="dnsmasq-dns" containerID="cri-o://8257368547dac2a98a11908961d8e956daf1070dcfa3a74cb1275428ab2cc9fb" gracePeriod=10 Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.168806 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59899cb9c-whmhs"] Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.172326 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.195794 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59899cb9c-whmhs"] Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.226934 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhsnr\" (UniqueName: \"kubernetes.io/projected/41441182-ee7b-46da-9f86-975ad9b22777-kube-api-access-mhsnr\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.227156 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-dns-swift-storage-0\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.227202 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-dns-svc\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.227250 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-ovsdbserver-sb\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.227621 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-config\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " 
pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.227868 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-openstack-edpm-ipam\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.227934 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-ovsdbserver-nb\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.331684 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-dns-swift-storage-0\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.331738 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-dns-svc\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.331773 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-ovsdbserver-sb\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.331852 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-config\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.331965 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-openstack-edpm-ipam\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.331997 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-ovsdbserver-nb\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.332038 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhsnr\" (UniqueName: \"kubernetes.io/projected/41441182-ee7b-46da-9f86-975ad9b22777-kube-api-access-mhsnr\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " 
pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.333637 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-dns-swift-storage-0\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.334193 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-dns-svc\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.335320 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-ovsdbserver-sb\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.335703 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-ovsdbserver-nb\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.335785 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-config\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.335808 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/41441182-ee7b-46da-9f86-975ad9b22777-openstack-edpm-ipam\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.359774 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhsnr\" (UniqueName: \"kubernetes.io/projected/41441182-ee7b-46da-9f86-975ad9b22777-kube-api-access-mhsnr\") pod \"dnsmasq-dns-59899cb9c-whmhs\" (UID: \"41441182-ee7b-46da-9f86-975ad9b22777\") " pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.507367 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.532929 4799 generic.go:334] "Generic (PLEG): container finished" podID="82c5bc2f-a942-452f-9904-825ee865bee7" containerID="8257368547dac2a98a11908961d8e956daf1070dcfa3a74cb1275428ab2cc9fb" exitCode=0 Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.532984 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" event={"ID":"82c5bc2f-a942-452f-9904-825ee865bee7","Type":"ContainerDied","Data":"8257368547dac2a98a11908961d8e956daf1070dcfa3a74cb1275428ab2cc9fb"} Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.533031 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" event={"ID":"82c5bc2f-a942-452f-9904-825ee865bee7","Type":"ContainerDied","Data":"7838c8b52718de434a27478284b951ffc3e81b3431d82fb149059c75392ff9b4"} Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.533046 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7838c8b52718de434a27478284b951ffc3e81b3431d82fb149059c75392ff9b4" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.656114 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.841511 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vw6rn\" (UniqueName: \"kubernetes.io/projected/82c5bc2f-a942-452f-9904-825ee865bee7-kube-api-access-vw6rn\") pod \"82c5bc2f-a942-452f-9904-825ee865bee7\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.841628 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-dns-svc\") pod \"82c5bc2f-a942-452f-9904-825ee865bee7\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.841660 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-config\") pod \"82c5bc2f-a942-452f-9904-825ee865bee7\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.841728 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-ovsdbserver-nb\") pod \"82c5bc2f-a942-452f-9904-825ee865bee7\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.841764 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-ovsdbserver-sb\") pod \"82c5bc2f-a942-452f-9904-825ee865bee7\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.841898 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-dns-swift-storage-0\") pod \"82c5bc2f-a942-452f-9904-825ee865bee7\" (UID: \"82c5bc2f-a942-452f-9904-825ee865bee7\") " Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.847497 4799 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82c5bc2f-a942-452f-9904-825ee865bee7-kube-api-access-vw6rn" (OuterVolumeSpecName: "kube-api-access-vw6rn") pod "82c5bc2f-a942-452f-9904-825ee865bee7" (UID: "82c5bc2f-a942-452f-9904-825ee865bee7"). InnerVolumeSpecName "kube-api-access-vw6rn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.904696 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "82c5bc2f-a942-452f-9904-825ee865bee7" (UID: "82c5bc2f-a942-452f-9904-825ee865bee7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.912179 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "82c5bc2f-a942-452f-9904-825ee865bee7" (UID: "82c5bc2f-a942-452f-9904-825ee865bee7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.913634 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "82c5bc2f-a942-452f-9904-825ee865bee7" (UID: "82c5bc2f-a942-452f-9904-825ee865bee7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.920432 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "82c5bc2f-a942-452f-9904-825ee865bee7" (UID: "82c5bc2f-a942-452f-9904-825ee865bee7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.919216 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-config" (OuterVolumeSpecName: "config") pod "82c5bc2f-a942-452f-9904-825ee865bee7" (UID: "82c5bc2f-a942-452f-9904-825ee865bee7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.945963 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vw6rn\" (UniqueName: \"kubernetes.io/projected/82c5bc2f-a942-452f-9904-825ee865bee7-kube-api-access-vw6rn\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.946001 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.946011 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.946021 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.946028 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:32 crc kubenswrapper[4799]: I0121 17:56:32.946037 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82c5bc2f-a942-452f-9904-825ee865bee7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:33 crc kubenswrapper[4799]: I0121 17:56:33.042673 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59899cb9c-whmhs"] Jan 21 17:56:33 crc kubenswrapper[4799]: W0121 17:56:33.050257 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41441182_ee7b_46da_9f86_975ad9b22777.slice/crio-fc432f934f0e7c5685c3142da04e7002f2dd9cf86c651db1b5b4553d9c881b90 WatchSource:0}: Error finding container fc432f934f0e7c5685c3142da04e7002f2dd9cf86c651db1b5b4553d9c881b90: Status 404 returned error can't find the container with id fc432f934f0e7c5685c3142da04e7002f2dd9cf86c651db1b5b4553d9c881b90 Jan 21 17:56:33 crc kubenswrapper[4799]: I0121 17:56:33.616282 4799 generic.go:334] "Generic (PLEG): container finished" podID="41441182-ee7b-46da-9f86-975ad9b22777" containerID="fc880a796a9ac2b7e0fb338b3841bd9167f4cd960f5b6c3fce09efc324bbcdff" exitCode=0 Jan 21 17:56:33 crc kubenswrapper[4799]: I0121 17:56:33.616798 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5dbd69cdbc-vhw4k" Jan 21 17:56:33 crc kubenswrapper[4799]: I0121 17:56:33.623995 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59899cb9c-whmhs" event={"ID":"41441182-ee7b-46da-9f86-975ad9b22777","Type":"ContainerDied","Data":"fc880a796a9ac2b7e0fb338b3841bd9167f4cd960f5b6c3fce09efc324bbcdff"} Jan 21 17:56:33 crc kubenswrapper[4799]: I0121 17:56:33.624057 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59899cb9c-whmhs" event={"ID":"41441182-ee7b-46da-9f86-975ad9b22777","Type":"ContainerStarted","Data":"fc432f934f0e7c5685c3142da04e7002f2dd9cf86c651db1b5b4553d9c881b90"} Jan 21 17:56:33 crc kubenswrapper[4799]: I0121 17:56:33.811561 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5dbd69cdbc-vhw4k"] Jan 21 17:56:33 crc kubenswrapper[4799]: I0121 17:56:33.823657 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5dbd69cdbc-vhw4k"] Jan 21 17:56:34 crc kubenswrapper[4799]: I0121 17:56:34.219734 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82c5bc2f-a942-452f-9904-825ee865bee7" path="/var/lib/kubelet/pods/82c5bc2f-a942-452f-9904-825ee865bee7/volumes" Jan 21 17:56:34 crc kubenswrapper[4799]: I0121 17:56:34.629586 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59899cb9c-whmhs" event={"ID":"41441182-ee7b-46da-9f86-975ad9b22777","Type":"ContainerStarted","Data":"45768f502c2ade9fa5c47357127b67967ef5743c078cd83cfeff226a6eda26e1"} Jan 21 17:56:34 crc kubenswrapper[4799]: I0121 17:56:34.629862 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:34 crc kubenswrapper[4799]: I0121 17:56:34.660727 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59899cb9c-whmhs" podStartSLOduration=2.660706801 podStartE2EDuration="2.660706801s" podCreationTimestamp="2026-01-21 17:56:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:56:34.656971126 +0000 UTC m=+1421.283261159" watchObservedRunningTime="2026-01-21 17:56:34.660706801 +0000 UTC m=+1421.286996824" Jan 21 17:56:36 crc kubenswrapper[4799]: I0121 17:56:36.167615 4799 scope.go:117] "RemoveContainer" containerID="d18ca9012873ef22c48f7bd29f7fe503167022792fd44328ccf69dd39dbcf871" Jan 21 17:56:42 crc kubenswrapper[4799]: I0121 17:56:42.508918 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59899cb9c-whmhs" Jan 21 17:56:42 crc kubenswrapper[4799]: I0121 17:56:42.613707 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f64fd4b4c-frnzp"] Jan 21 17:56:42 crc kubenswrapper[4799]: I0121 17:56:42.621773 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" podUID="9efdc452-a321-4a25-adf5-67894f911570" containerName="dnsmasq-dns" containerID="cri-o://e49fac59261f18102f99ad0b9b9cb519db5d9d27fda5b47d1827159a95c57bfa" gracePeriod=10 Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.350810 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.438711 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j898t\" (UniqueName: \"kubernetes.io/projected/9efdc452-a321-4a25-adf5-67894f911570-kube-api-access-j898t\") pod \"9efdc452-a321-4a25-adf5-67894f911570\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.438771 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-ovsdbserver-nb\") pod \"9efdc452-a321-4a25-adf5-67894f911570\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.438810 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-ovsdbserver-sb\") pod \"9efdc452-a321-4a25-adf5-67894f911570\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.438956 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-dns-svc\") pod \"9efdc452-a321-4a25-adf5-67894f911570\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.438980 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-dns-swift-storage-0\") pod \"9efdc452-a321-4a25-adf5-67894f911570\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.439034 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-openstack-edpm-ipam\") pod \"9efdc452-a321-4a25-adf5-67894f911570\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.439110 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-config\") pod \"9efdc452-a321-4a25-adf5-67894f911570\" (UID: \"9efdc452-a321-4a25-adf5-67894f911570\") " Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.462332 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9efdc452-a321-4a25-adf5-67894f911570-kube-api-access-j898t" (OuterVolumeSpecName: "kube-api-access-j898t") pod "9efdc452-a321-4a25-adf5-67894f911570" (UID: "9efdc452-a321-4a25-adf5-67894f911570"). InnerVolumeSpecName "kube-api-access-j898t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.501790 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9efdc452-a321-4a25-adf5-67894f911570" (UID: "9efdc452-a321-4a25-adf5-67894f911570"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.515443 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-config" (OuterVolumeSpecName: "config") pod "9efdc452-a321-4a25-adf5-67894f911570" (UID: "9efdc452-a321-4a25-adf5-67894f911570"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.519718 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "9efdc452-a321-4a25-adf5-67894f911570" (UID: "9efdc452-a321-4a25-adf5-67894f911570"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.523568 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9efdc452-a321-4a25-adf5-67894f911570" (UID: "9efdc452-a321-4a25-adf5-67894f911570"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.526070 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9efdc452-a321-4a25-adf5-67894f911570" (UID: "9efdc452-a321-4a25-adf5-67894f911570"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.531946 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9efdc452-a321-4a25-adf5-67894f911570" (UID: "9efdc452-a321-4a25-adf5-67894f911570"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.543589 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j898t\" (UniqueName: \"kubernetes.io/projected/9efdc452-a321-4a25-adf5-67894f911570-kube-api-access-j898t\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.543627 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.543638 4799 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.543647 4799 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.543656 4799 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.543666 4799 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.543674 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9efdc452-a321-4a25-adf5-67894f911570-config\") on node \"crc\" DevicePath \"\"" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.736895 4799 generic.go:334] "Generic (PLEG): container finished" podID="9efdc452-a321-4a25-adf5-67894f911570" containerID="e49fac59261f18102f99ad0b9b9cb519db5d9d27fda5b47d1827159a95c57bfa" exitCode=0 Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.736952 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" event={"ID":"9efdc452-a321-4a25-adf5-67894f911570","Type":"ContainerDied","Data":"e49fac59261f18102f99ad0b9b9cb519db5d9d27fda5b47d1827159a95c57bfa"} Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.736984 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" event={"ID":"9efdc452-a321-4a25-adf5-67894f911570","Type":"ContainerDied","Data":"6aa47fe06b06b1a72f42b14887f7c8e403d6a88a5d302a658d8a9055e8edbef7"} Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.737001 4799 scope.go:117] "RemoveContainer" containerID="e49fac59261f18102f99ad0b9b9cb519db5d9d27fda5b47d1827159a95c57bfa" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.737011 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f64fd4b4c-frnzp" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.812232 4799 scope.go:117] "RemoveContainer" containerID="14af5a88af73f817ad12b3b833b891105364d321198c24bf8f5bd86fb1895db7" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.815608 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f64fd4b4c-frnzp"] Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.839491 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6f64fd4b4c-frnzp"] Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.842608 4799 scope.go:117] "RemoveContainer" containerID="e49fac59261f18102f99ad0b9b9cb519db5d9d27fda5b47d1827159a95c57bfa" Jan 21 17:56:43 crc kubenswrapper[4799]: E0121 17:56:43.843099 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e49fac59261f18102f99ad0b9b9cb519db5d9d27fda5b47d1827159a95c57bfa\": container with ID starting with e49fac59261f18102f99ad0b9b9cb519db5d9d27fda5b47d1827159a95c57bfa not found: ID does not exist" containerID="e49fac59261f18102f99ad0b9b9cb519db5d9d27fda5b47d1827159a95c57bfa" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.843147 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e49fac59261f18102f99ad0b9b9cb519db5d9d27fda5b47d1827159a95c57bfa"} err="failed to get container status \"e49fac59261f18102f99ad0b9b9cb519db5d9d27fda5b47d1827159a95c57bfa\": rpc error: code = NotFound desc = could not find container \"e49fac59261f18102f99ad0b9b9cb519db5d9d27fda5b47d1827159a95c57bfa\": container with ID starting with e49fac59261f18102f99ad0b9b9cb519db5d9d27fda5b47d1827159a95c57bfa not found: ID does not exist" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.843170 4799 scope.go:117] "RemoveContainer" containerID="14af5a88af73f817ad12b3b833b891105364d321198c24bf8f5bd86fb1895db7" Jan 21 17:56:43 crc kubenswrapper[4799]: E0121 17:56:43.843468 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14af5a88af73f817ad12b3b833b891105364d321198c24bf8f5bd86fb1895db7\": container with ID starting with 14af5a88af73f817ad12b3b833b891105364d321198c24bf8f5bd86fb1895db7 not found: ID does not exist" containerID="14af5a88af73f817ad12b3b833b891105364d321198c24bf8f5bd86fb1895db7" Jan 21 17:56:43 crc kubenswrapper[4799]: I0121 17:56:43.843499 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14af5a88af73f817ad12b3b833b891105364d321198c24bf8f5bd86fb1895db7"} err="failed to get container status \"14af5a88af73f817ad12b3b833b891105364d321198c24bf8f5bd86fb1895db7\": rpc error: code = NotFound desc = could not find container \"14af5a88af73f817ad12b3b833b891105364d321198c24bf8f5bd86fb1895db7\": container with ID starting with 14af5a88af73f817ad12b3b833b891105364d321198c24bf8f5bd86fb1895db7 not found: ID does not exist" Jan 21 17:56:44 crc kubenswrapper[4799]: I0121 17:56:44.216353 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9efdc452-a321-4a25-adf5-67894f911570" path="/var/lib/kubelet/pods/9efdc452-a321-4a25-adf5-67894f911570/volumes" Jan 21 17:56:46 crc kubenswrapper[4799]: I0121 17:56:46.773930 4799 generic.go:334] "Generic (PLEG): container finished" podID="135b6a22-006b-4270-a559-39fc323570b2" containerID="191abc08878003aecd4013471ed66904789fbc75e7e865367b1aa9ec7fdb72c6" 
exitCode=0 Jan 21 17:56:46 crc kubenswrapper[4799]: I0121 17:56:46.774074 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"135b6a22-006b-4270-a559-39fc323570b2","Type":"ContainerDied","Data":"191abc08878003aecd4013471ed66904789fbc75e7e865367b1aa9ec7fdb72c6"} Jan 21 17:56:47 crc kubenswrapper[4799]: I0121 17:56:47.785594 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"135b6a22-006b-4270-a559-39fc323570b2","Type":"ContainerStarted","Data":"1b3e7196ad7b13d73025b3c616493d090c3be21df12a8be374a7de7e243b124e"} Jan 21 17:56:47 crc kubenswrapper[4799]: I0121 17:56:47.786060 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 21 17:56:47 crc kubenswrapper[4799]: I0121 17:56:47.787296 4799 generic.go:334] "Generic (PLEG): container finished" podID="88d1e166-bb2f-473e-a955-e79c6251a580" containerID="f8e1006a5e4eac397850d24cc58eeeda03a573d9a2216b34dee7a75e8a470365" exitCode=0 Jan 21 17:56:47 crc kubenswrapper[4799]: I0121 17:56:47.787344 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"88d1e166-bb2f-473e-a955-e79c6251a580","Type":"ContainerDied","Data":"f8e1006a5e4eac397850d24cc58eeeda03a573d9a2216b34dee7a75e8a470365"} Jan 21 17:56:47 crc kubenswrapper[4799]: I0121 17:56:47.856617 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.856595804 podStartE2EDuration="36.856595804s" podCreationTimestamp="2026-01-21 17:56:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:56:47.845380995 +0000 UTC m=+1434.471671028" watchObservedRunningTime="2026-01-21 17:56:47.856595804 +0000 UTC m=+1434.482885827" Jan 21 17:56:48 crc kubenswrapper[4799]: I0121 17:56:48.800607 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"88d1e166-bb2f-473e-a955-e79c6251a580","Type":"ContainerStarted","Data":"8706b653d0692075c48a794166b0dec27707692ef9b262734811f85b150eb4c8"} Jan 21 17:56:48 crc kubenswrapper[4799]: I0121 17:56:48.801722 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:56:48 crc kubenswrapper[4799]: I0121 17:56:48.830651 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.830628349 podStartE2EDuration="36.830628349s" podCreationTimestamp="2026-01-21 17:56:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 17:56:48.822529366 +0000 UTC m=+1435.448819399" watchObservedRunningTime="2026-01-21 17:56:48.830628349 +0000 UTC m=+1435.456918372" Jan 21 17:56:55 crc kubenswrapper[4799]: I0121 17:56:55.970781 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:56:55 crc kubenswrapper[4799]: I0121 17:56:55.971186 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.626336 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd"] Jan 21 17:57:00 crc kubenswrapper[4799]: E0121 17:57:00.627295 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82c5bc2f-a942-452f-9904-825ee865bee7" containerName="dnsmasq-dns" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.627309 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="82c5bc2f-a942-452f-9904-825ee865bee7" containerName="dnsmasq-dns" Jan 21 17:57:00 crc kubenswrapper[4799]: E0121 17:57:00.627330 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82c5bc2f-a942-452f-9904-825ee865bee7" containerName="init" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.627335 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="82c5bc2f-a942-452f-9904-825ee865bee7" containerName="init" Jan 21 17:57:00 crc kubenswrapper[4799]: E0121 17:57:00.627356 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9efdc452-a321-4a25-adf5-67894f911570" containerName="init" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.627364 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9efdc452-a321-4a25-adf5-67894f911570" containerName="init" Jan 21 17:57:00 crc kubenswrapper[4799]: E0121 17:57:00.627390 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9efdc452-a321-4a25-adf5-67894f911570" containerName="dnsmasq-dns" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.627395 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9efdc452-a321-4a25-adf5-67894f911570" containerName="dnsmasq-dns" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.627594 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="82c5bc2f-a942-452f-9904-825ee865bee7" containerName="dnsmasq-dns" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.627606 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="9efdc452-a321-4a25-adf5-67894f911570" containerName="dnsmasq-dns" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.628312 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.637467 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.637561 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.637705 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p22hr" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.637786 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.673292 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd"] Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.779042 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lcnw\" (UniqueName: \"kubernetes.io/projected/509437ec-6d22-4843-accb-db316692f6c9-kube-api-access-7lcnw\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd\" (UID: \"509437ec-6d22-4843-accb-db316692f6c9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.779116 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd\" (UID: \"509437ec-6d22-4843-accb-db316692f6c9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.779778 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd\" (UID: \"509437ec-6d22-4843-accb-db316692f6c9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.779849 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd\" (UID: \"509437ec-6d22-4843-accb-db316692f6c9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.881607 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd\" (UID: \"509437ec-6d22-4843-accb-db316692f6c9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.881688 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd\" (UID: \"509437ec-6d22-4843-accb-db316692f6c9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.881829 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lcnw\" (UniqueName: \"kubernetes.io/projected/509437ec-6d22-4843-accb-db316692f6c9-kube-api-access-7lcnw\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd\" (UID: \"509437ec-6d22-4843-accb-db316692f6c9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.881881 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd\" (UID: \"509437ec-6d22-4843-accb-db316692f6c9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.889119 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd\" (UID: \"509437ec-6d22-4843-accb-db316692f6c9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.892518 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd\" (UID: \"509437ec-6d22-4843-accb-db316692f6c9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.895229 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd\" (UID: \"509437ec-6d22-4843-accb-db316692f6c9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.899058 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lcnw\" (UniqueName: \"kubernetes.io/projected/509437ec-6d22-4843-accb-db316692f6c9-kube-api-access-7lcnw\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd\" (UID: \"509437ec-6d22-4843-accb-db316692f6c9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" Jan 21 17:57:00 crc kubenswrapper[4799]: I0121 17:57:00.956732 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" Jan 21 17:57:01 crc kubenswrapper[4799]: I0121 17:57:01.571309 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd"] Jan 21 17:57:01 crc kubenswrapper[4799]: I0121 17:57:01.990811 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" event={"ID":"509437ec-6d22-4843-accb-db316692f6c9","Type":"ContainerStarted","Data":"54c506284a9b40107a0f23291743e6babac518981b74379ad2f6c30679535e10"} Jan 21 17:57:02 crc kubenswrapper[4799]: I0121 17:57:02.040446 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 21 17:57:03 crc kubenswrapper[4799]: I0121 17:57:03.097350 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 21 17:57:12 crc kubenswrapper[4799]: I0121 17:57:12.112271 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" event={"ID":"509437ec-6d22-4843-accb-db316692f6c9","Type":"ContainerStarted","Data":"1851cb01abaf079fc4f0b377040b0648ffbb63b263e0701118348b9bb705e4c4"} Jan 21 17:57:12 crc kubenswrapper[4799]: I0121 17:57:12.139157 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" podStartSLOduration=1.982775948 podStartE2EDuration="12.139119441s" podCreationTimestamp="2026-01-21 17:57:00 +0000 UTC" firstStartedPulling="2026-01-21 17:57:01.575829538 +0000 UTC m=+1448.202119561" lastFinishedPulling="2026-01-21 17:57:11.732173031 +0000 UTC m=+1458.358463054" observedRunningTime="2026-01-21 17:57:12.126243606 +0000 UTC m=+1458.752533649" watchObservedRunningTime="2026-01-21 17:57:12.139119441 +0000 UTC m=+1458.765409464" Jan 21 17:57:24 crc kubenswrapper[4799]: I0121 17:57:24.268487 4799 generic.go:334] "Generic (PLEG): container finished" podID="509437ec-6d22-4843-accb-db316692f6c9" containerID="1851cb01abaf079fc4f0b377040b0648ffbb63b263e0701118348b9bb705e4c4" exitCode=0 Jan 21 17:57:24 crc kubenswrapper[4799]: I0121 17:57:24.268593 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" event={"ID":"509437ec-6d22-4843-accb-db316692f6c9","Type":"ContainerDied","Data":"1851cb01abaf079fc4f0b377040b0648ffbb63b263e0701118348b9bb705e4c4"} Jan 21 17:57:25 crc kubenswrapper[4799]: I0121 17:57:25.753470 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" Jan 21 17:57:25 crc kubenswrapper[4799]: I0121 17:57:25.911820 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-inventory\") pod \"509437ec-6d22-4843-accb-db316692f6c9\" (UID: \"509437ec-6d22-4843-accb-db316692f6c9\") " Jan 21 17:57:25 crc kubenswrapper[4799]: I0121 17:57:25.911942 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-ssh-key-openstack-edpm-ipam\") pod \"509437ec-6d22-4843-accb-db316692f6c9\" (UID: \"509437ec-6d22-4843-accb-db316692f6c9\") " Jan 21 17:57:25 crc kubenswrapper[4799]: I0121 17:57:25.912022 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-repo-setup-combined-ca-bundle\") pod \"509437ec-6d22-4843-accb-db316692f6c9\" (UID: \"509437ec-6d22-4843-accb-db316692f6c9\") " Jan 21 17:57:25 crc kubenswrapper[4799]: I0121 17:57:25.912067 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lcnw\" (UniqueName: \"kubernetes.io/projected/509437ec-6d22-4843-accb-db316692f6c9-kube-api-access-7lcnw\") pod \"509437ec-6d22-4843-accb-db316692f6c9\" (UID: \"509437ec-6d22-4843-accb-db316692f6c9\") " Jan 21 17:57:25 crc kubenswrapper[4799]: I0121 17:57:25.919602 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/509437ec-6d22-4843-accb-db316692f6c9-kube-api-access-7lcnw" (OuterVolumeSpecName: "kube-api-access-7lcnw") pod "509437ec-6d22-4843-accb-db316692f6c9" (UID: "509437ec-6d22-4843-accb-db316692f6c9"). InnerVolumeSpecName "kube-api-access-7lcnw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:57:25 crc kubenswrapper[4799]: I0121 17:57:25.920997 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "509437ec-6d22-4843-accb-db316692f6c9" (UID: "509437ec-6d22-4843-accb-db316692f6c9"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:57:25 crc kubenswrapper[4799]: I0121 17:57:25.942459 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "509437ec-6d22-4843-accb-db316692f6c9" (UID: "509437ec-6d22-4843-accb-db316692f6c9"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:57:25 crc kubenswrapper[4799]: I0121 17:57:25.947958 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-inventory" (OuterVolumeSpecName: "inventory") pod "509437ec-6d22-4843-accb-db316692f6c9" (UID: "509437ec-6d22-4843-accb-db316692f6c9"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:57:25 crc kubenswrapper[4799]: I0121 17:57:25.971311 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:57:25 crc kubenswrapper[4799]: I0121 17:57:25.971419 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.014456 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-inventory\") on node \"crc\" DevicePath \"\"" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.014506 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.014524 4799 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509437ec-6d22-4843-accb-db316692f6c9-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.014539 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lcnw\" (UniqueName: \"kubernetes.io/projected/509437ec-6d22-4843-accb-db316692f6c9-kube-api-access-7lcnw\") on node \"crc\" DevicePath \"\"" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.297104 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" event={"ID":"509437ec-6d22-4843-accb-db316692f6c9","Type":"ContainerDied","Data":"54c506284a9b40107a0f23291743e6babac518981b74379ad2f6c30679535e10"} Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.297552 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54c506284a9b40107a0f23291743e6babac518981b74379ad2f6c30679535e10" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.297318 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.403613 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb"] Jan 21 17:57:26 crc kubenswrapper[4799]: E0121 17:57:26.404490 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="509437ec-6d22-4843-accb-db316692f6c9" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.404516 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="509437ec-6d22-4843-accb-db316692f6c9" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.404820 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="509437ec-6d22-4843-accb-db316692f6c9" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.405935 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.412207 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p22hr" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.412513 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.413454 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.413637 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.421280 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb"] Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.524732 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxk2n\" (UniqueName: \"kubernetes.io/projected/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-kube-api-access-kxk2n\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-f8tvb\" (UID: \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.524793 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-ssh-key-openstack-edpm-ipam\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-f8tvb\" (UID: \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.524863 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-f8tvb\" (UID: \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.627529 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-kxk2n\" (UniqueName: \"kubernetes.io/projected/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-kube-api-access-kxk2n\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-f8tvb\" (UID: \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.627622 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-ssh-key-openstack-edpm-ipam\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-f8tvb\" (UID: \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.627759 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-f8tvb\" (UID: \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.632064 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-f8tvb\" (UID: \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.632527 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-ssh-key-openstack-edpm-ipam\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-f8tvb\" (UID: \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.643695 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxk2n\" (UniqueName: \"kubernetes.io/projected/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-kube-api-access-kxk2n\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-f8tvb\" (UID: \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" Jan 21 17:57:26 crc kubenswrapper[4799]: I0121 17:57:26.729024 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" Jan 21 17:57:27 crc kubenswrapper[4799]: I0121 17:57:27.306201 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb"] Jan 21 17:57:27 crc kubenswrapper[4799]: I0121 17:57:27.312203 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" event={"ID":"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d","Type":"ContainerStarted","Data":"6af0398f32aefdac9d5f50a307e2d094aeb672979a631929e687d133db734e15"} Jan 21 17:57:28 crc kubenswrapper[4799]: I0121 17:57:28.327950 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" event={"ID":"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d","Type":"ContainerStarted","Data":"fb33b98aeb31eacd73a1b55b99fef5ed484c03e982d3c80eadc0592828c0594d"} Jan 21 17:57:28 crc kubenswrapper[4799]: I0121 17:57:28.365471 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" podStartSLOduration=1.838832566 podStartE2EDuration="2.365441145s" podCreationTimestamp="2026-01-21 17:57:26 +0000 UTC" firstStartedPulling="2026-01-21 17:57:27.30289276 +0000 UTC m=+1473.929182783" lastFinishedPulling="2026-01-21 17:57:27.829501299 +0000 UTC m=+1474.455791362" observedRunningTime="2026-01-21 17:57:28.353400054 +0000 UTC m=+1474.979690087" watchObservedRunningTime="2026-01-21 17:57:28.365441145 +0000 UTC m=+1474.991731168" Jan 21 17:57:28 crc kubenswrapper[4799]: I0121 17:57:28.991009 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lf92c"] Jan 21 17:57:28 crc kubenswrapper[4799]: I0121 17:57:28.995431 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:29 crc kubenswrapper[4799]: I0121 17:57:29.003063 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lf92c"] Jan 21 17:57:29 crc kubenswrapper[4799]: I0121 17:57:29.107665 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a295812d-d53c-4097-86db-1f2e3d6b9dd2-catalog-content\") pod \"redhat-operators-lf92c\" (UID: \"a295812d-d53c-4097-86db-1f2e3d6b9dd2\") " pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:29 crc kubenswrapper[4799]: I0121 17:57:29.107745 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n72cp\" (UniqueName: \"kubernetes.io/projected/a295812d-d53c-4097-86db-1f2e3d6b9dd2-kube-api-access-n72cp\") pod \"redhat-operators-lf92c\" (UID: \"a295812d-d53c-4097-86db-1f2e3d6b9dd2\") " pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:29 crc kubenswrapper[4799]: I0121 17:57:29.108076 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a295812d-d53c-4097-86db-1f2e3d6b9dd2-utilities\") pod \"redhat-operators-lf92c\" (UID: \"a295812d-d53c-4097-86db-1f2e3d6b9dd2\") " pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:29 crc kubenswrapper[4799]: I0121 17:57:29.210291 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a295812d-d53c-4097-86db-1f2e3d6b9dd2-catalog-content\") pod \"redhat-operators-lf92c\" (UID: \"a295812d-d53c-4097-86db-1f2e3d6b9dd2\") " pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:29 crc kubenswrapper[4799]: I0121 17:57:29.210391 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n72cp\" (UniqueName: \"kubernetes.io/projected/a295812d-d53c-4097-86db-1f2e3d6b9dd2-kube-api-access-n72cp\") pod \"redhat-operators-lf92c\" (UID: \"a295812d-d53c-4097-86db-1f2e3d6b9dd2\") " pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:29 crc kubenswrapper[4799]: I0121 17:57:29.210453 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a295812d-d53c-4097-86db-1f2e3d6b9dd2-utilities\") pod \"redhat-operators-lf92c\" (UID: \"a295812d-d53c-4097-86db-1f2e3d6b9dd2\") " pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:29 crc kubenswrapper[4799]: I0121 17:57:29.210830 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a295812d-d53c-4097-86db-1f2e3d6b9dd2-catalog-content\") pod \"redhat-operators-lf92c\" (UID: \"a295812d-d53c-4097-86db-1f2e3d6b9dd2\") " pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:29 crc kubenswrapper[4799]: I0121 17:57:29.210876 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a295812d-d53c-4097-86db-1f2e3d6b9dd2-utilities\") pod \"redhat-operators-lf92c\" (UID: \"a295812d-d53c-4097-86db-1f2e3d6b9dd2\") " pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:29 crc kubenswrapper[4799]: I0121 17:57:29.237467 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-n72cp\" (UniqueName: \"kubernetes.io/projected/a295812d-d53c-4097-86db-1f2e3d6b9dd2-kube-api-access-n72cp\") pod \"redhat-operators-lf92c\" (UID: \"a295812d-d53c-4097-86db-1f2e3d6b9dd2\") " pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:29 crc kubenswrapper[4799]: I0121 17:57:29.317281 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:29 crc kubenswrapper[4799]: W0121 17:57:29.838541 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda295812d_d53c_4097_86db_1f2e3d6b9dd2.slice/crio-1a7cefd94882884e4f71d30349c200c6708bc02143e520220fbeb680a2065000 WatchSource:0}: Error finding container 1a7cefd94882884e4f71d30349c200c6708bc02143e520220fbeb680a2065000: Status 404 returned error can't find the container with id 1a7cefd94882884e4f71d30349c200c6708bc02143e520220fbeb680a2065000 Jan 21 17:57:29 crc kubenswrapper[4799]: I0121 17:57:29.840432 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lf92c"] Jan 21 17:57:30 crc kubenswrapper[4799]: I0121 17:57:30.358074 4799 generic.go:334] "Generic (PLEG): container finished" podID="a295812d-d53c-4097-86db-1f2e3d6b9dd2" containerID="27c583999260990ecae81c6d0ccf2f7fba28e25d5e2d663a526d6c5f4d54c7ff" exitCode=0 Jan 21 17:57:30 crc kubenswrapper[4799]: I0121 17:57:30.358230 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lf92c" event={"ID":"a295812d-d53c-4097-86db-1f2e3d6b9dd2","Type":"ContainerDied","Data":"27c583999260990ecae81c6d0ccf2f7fba28e25d5e2d663a526d6c5f4d54c7ff"} Jan 21 17:57:30 crc kubenswrapper[4799]: I0121 17:57:30.358391 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lf92c" event={"ID":"a295812d-d53c-4097-86db-1f2e3d6b9dd2","Type":"ContainerStarted","Data":"1a7cefd94882884e4f71d30349c200c6708bc02143e520220fbeb680a2065000"} Jan 21 17:57:31 crc kubenswrapper[4799]: I0121 17:57:31.373928 4799 generic.go:334] "Generic (PLEG): container finished" podID="7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d" containerID="fb33b98aeb31eacd73a1b55b99fef5ed484c03e982d3c80eadc0592828c0594d" exitCode=0 Jan 21 17:57:31 crc kubenswrapper[4799]: I0121 17:57:31.374027 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" event={"ID":"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d","Type":"ContainerDied","Data":"fb33b98aeb31eacd73a1b55b99fef5ed484c03e982d3c80eadc0592828c0594d"} Jan 21 17:57:32 crc kubenswrapper[4799]: I0121 17:57:32.390845 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lf92c" event={"ID":"a295812d-d53c-4097-86db-1f2e3d6b9dd2","Type":"ContainerStarted","Data":"fbf12a6f65ca6a67e2406657de1940c27083aa36aec2ffd634254017ae739c4a"} Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.225061 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.342794 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-ssh-key-openstack-edpm-ipam\") pod \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\" (UID: \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\") " Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.342998 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxk2n\" (UniqueName: \"kubernetes.io/projected/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-kube-api-access-kxk2n\") pod \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\" (UID: \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\") " Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.343025 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-inventory\") pod \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\" (UID: \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\") " Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.350977 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-kube-api-access-kxk2n" (OuterVolumeSpecName: "kube-api-access-kxk2n") pod "7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d" (UID: "7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d"). InnerVolumeSpecName "kube-api-access-kxk2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:57:33 crc kubenswrapper[4799]: E0121 17:57:33.377590 4799 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-ssh-key-openstack-edpm-ipam podName:7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d nodeName:}" failed. No retries permitted until 2026-01-21 17:57:33.87754381 +0000 UTC m=+1480.503833833 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ssh-key-openstack-edpm-ipam" (UniqueName: "kubernetes.io/secret/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-ssh-key-openstack-edpm-ipam") pod "7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d" (UID: "7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d") : error deleting /var/lib/kubelet/pods/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d/volume-subpaths: remove /var/lib/kubelet/pods/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d/volume-subpaths: no such file or directory Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.381620 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-inventory" (OuterVolumeSpecName: "inventory") pod "7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d" (UID: "7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.411380 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" event={"ID":"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d","Type":"ContainerDied","Data":"6af0398f32aefdac9d5f50a307e2d094aeb672979a631929e687d133db734e15"} Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.411464 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-f8tvb" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.411479 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6af0398f32aefdac9d5f50a307e2d094aeb672979a631929e687d133db734e15" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.449483 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxk2n\" (UniqueName: \"kubernetes.io/projected/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-kube-api-access-kxk2n\") on node \"crc\" DevicePath \"\"" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.449516 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-inventory\") on node \"crc\" DevicePath \"\"" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.478690 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp"] Jan 21 17:57:33 crc kubenswrapper[4799]: E0121 17:57:33.479317 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.479345 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.479641 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.480560 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.490194 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp"] Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.653439 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bff5\" (UniqueName: \"kubernetes.io/projected/7f2d9e34-479a-44ae-b64e-55baf5645dfc-kube-api-access-4bff5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp\" (UID: \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.653603 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp\" (UID: \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.653675 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp\" (UID: \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.653796 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp\" (UID: \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.755392 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bff5\" (UniqueName: \"kubernetes.io/projected/7f2d9e34-479a-44ae-b64e-55baf5645dfc-kube-api-access-4bff5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp\" (UID: \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.755514 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp\" (UID: \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.755576 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp\" (UID: \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.755673 4799 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp\" (UID: \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.763658 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp\" (UID: \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.763934 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp\" (UID: \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.764573 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp\" (UID: \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.778624 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bff5\" (UniqueName: \"kubernetes.io/projected/7f2d9e34-479a-44ae-b64e-55baf5645dfc-kube-api-access-4bff5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp\" (UID: \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" Jan 21 17:57:33 crc kubenswrapper[4799]: I0121 17:57:33.870372 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" Jan 21 17:57:34 crc kubenswrapper[4799]: I0121 17:57:33.959826 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-ssh-key-openstack-edpm-ipam\") pod \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\" (UID: \"7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d\") " Jan 21 17:57:34 crc kubenswrapper[4799]: I0121 17:57:33.963961 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d" (UID: "7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 17:57:34 crc kubenswrapper[4799]: I0121 17:57:34.062769 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 21 17:57:34 crc kubenswrapper[4799]: I0121 17:57:34.425196 4799 generic.go:334] "Generic (PLEG): container finished" podID="a295812d-d53c-4097-86db-1f2e3d6b9dd2" containerID="fbf12a6f65ca6a67e2406657de1940c27083aa36aec2ffd634254017ae739c4a" exitCode=0 Jan 21 17:57:34 crc kubenswrapper[4799]: I0121 17:57:34.425312 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lf92c" event={"ID":"a295812d-d53c-4097-86db-1f2e3d6b9dd2","Type":"ContainerDied","Data":"fbf12a6f65ca6a67e2406657de1940c27083aa36aec2ffd634254017ae739c4a"} Jan 21 17:57:34 crc kubenswrapper[4799]: W0121 17:57:34.936559 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f2d9e34_479a_44ae_b64e_55baf5645dfc.slice/crio-7493f6ffff96b82bdf893b6022ab8a7c4c702dac8dba9ced7c9974a35a93f178 WatchSource:0}: Error finding container 7493f6ffff96b82bdf893b6022ab8a7c4c702dac8dba9ced7c9974a35a93f178: Status 404 returned error can't find the container with id 7493f6ffff96b82bdf893b6022ab8a7c4c702dac8dba9ced7c9974a35a93f178 Jan 21 17:57:34 crc kubenswrapper[4799]: I0121 17:57:34.942676 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp"] Jan 21 17:57:35 crc kubenswrapper[4799]: I0121 17:57:35.453516 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lf92c" event={"ID":"a295812d-d53c-4097-86db-1f2e3d6b9dd2","Type":"ContainerStarted","Data":"d2818e0caccc03508ee21a208411e2517d06f0783b661b4d61949bdba9fad6d0"} Jan 21 17:57:35 crc kubenswrapper[4799]: I0121 17:57:35.456544 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" event={"ID":"7f2d9e34-479a-44ae-b64e-55baf5645dfc","Type":"ContainerStarted","Data":"7493f6ffff96b82bdf893b6022ab8a7c4c702dac8dba9ced7c9974a35a93f178"} Jan 21 17:57:35 crc kubenswrapper[4799]: I0121 17:57:35.482026 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lf92c" podStartSLOduration=3.0304394 podStartE2EDuration="7.482004121s" podCreationTimestamp="2026-01-21 17:57:28 +0000 UTC" firstStartedPulling="2026-01-21 17:57:30.360808178 +0000 UTC m=+1476.987098201" lastFinishedPulling="2026-01-21 17:57:34.812372899 +0000 UTC m=+1481.438662922" observedRunningTime="2026-01-21 17:57:35.47979768 +0000 UTC m=+1482.106087733" watchObservedRunningTime="2026-01-21 17:57:35.482004121 +0000 UTC m=+1482.108294154" Jan 21 17:57:36 crc kubenswrapper[4799]: I0121 17:57:36.287723 4799 scope.go:117] "RemoveContainer" containerID="2904f03ad517fa9f0976131f578ba33ac0977a131bf57e2229450a99359c1801" Jan 21 17:57:36 crc kubenswrapper[4799]: I0121 17:57:36.332910 4799 scope.go:117] "RemoveContainer" containerID="eb2904c5b8474e7c920a9a6f608841e76b0952943dd1ba4a3905acac4aa41449" Jan 21 17:57:36 crc kubenswrapper[4799]: I0121 17:57:36.391409 4799 scope.go:117] "RemoveContainer" containerID="e19b9f0e038c9ded9ea4b11681266954f4ea8cb749b3b051e6c9dbd2bb1f658d" Jan 21 17:57:36 crc kubenswrapper[4799]: I0121 17:57:36.474716 4799 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" event={"ID":"7f2d9e34-479a-44ae-b64e-55baf5645dfc","Type":"ContainerStarted","Data":"8e0def670c763515c6a258ea671e54fb0586c38bf3b0e5dc67858c403e01a414"} Jan 21 17:57:36 crc kubenswrapper[4799]: I0121 17:57:36.508751 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" podStartSLOduration=3.107872776 podStartE2EDuration="3.508713667s" podCreationTimestamp="2026-01-21 17:57:33 +0000 UTC" firstStartedPulling="2026-01-21 17:57:34.940464581 +0000 UTC m=+1481.566754594" lastFinishedPulling="2026-01-21 17:57:35.341305432 +0000 UTC m=+1481.967595485" observedRunningTime="2026-01-21 17:57:36.496536341 +0000 UTC m=+1483.122826394" watchObservedRunningTime="2026-01-21 17:57:36.508713667 +0000 UTC m=+1483.135003690" Jan 21 17:57:39 crc kubenswrapper[4799]: I0121 17:57:39.318182 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:39 crc kubenswrapper[4799]: I0121 17:57:39.318637 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:40 crc kubenswrapper[4799]: I0121 17:57:40.375528 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lf92c" podUID="a295812d-d53c-4097-86db-1f2e3d6b9dd2" containerName="registry-server" probeResult="failure" output=< Jan 21 17:57:40 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Jan 21 17:57:40 crc kubenswrapper[4799]: > Jan 21 17:57:49 crc kubenswrapper[4799]: I0121 17:57:49.380574 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:49 crc kubenswrapper[4799]: I0121 17:57:49.440842 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:49 crc kubenswrapper[4799]: I0121 17:57:49.619894 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lf92c"] Jan 21 17:57:50 crc kubenswrapper[4799]: I0121 17:57:50.668275 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lf92c" podUID="a295812d-d53c-4097-86db-1f2e3d6b9dd2" containerName="registry-server" containerID="cri-o://d2818e0caccc03508ee21a208411e2517d06f0783b661b4d61949bdba9fad6d0" gracePeriod=2 Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.171986 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.234848 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a295812d-d53c-4097-86db-1f2e3d6b9dd2-utilities\") pod \"a295812d-d53c-4097-86db-1f2e3d6b9dd2\" (UID: \"a295812d-d53c-4097-86db-1f2e3d6b9dd2\") " Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.234984 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n72cp\" (UniqueName: \"kubernetes.io/projected/a295812d-d53c-4097-86db-1f2e3d6b9dd2-kube-api-access-n72cp\") pod \"a295812d-d53c-4097-86db-1f2e3d6b9dd2\" (UID: \"a295812d-d53c-4097-86db-1f2e3d6b9dd2\") " Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.235053 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a295812d-d53c-4097-86db-1f2e3d6b9dd2-catalog-content\") pod \"a295812d-d53c-4097-86db-1f2e3d6b9dd2\" (UID: \"a295812d-d53c-4097-86db-1f2e3d6b9dd2\") " Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.235995 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a295812d-d53c-4097-86db-1f2e3d6b9dd2-utilities" (OuterVolumeSpecName: "utilities") pod "a295812d-d53c-4097-86db-1f2e3d6b9dd2" (UID: "a295812d-d53c-4097-86db-1f2e3d6b9dd2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.241716 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a295812d-d53c-4097-86db-1f2e3d6b9dd2-kube-api-access-n72cp" (OuterVolumeSpecName: "kube-api-access-n72cp") pod "a295812d-d53c-4097-86db-1f2e3d6b9dd2" (UID: "a295812d-d53c-4097-86db-1f2e3d6b9dd2"). InnerVolumeSpecName "kube-api-access-n72cp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.338519 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a295812d-d53c-4097-86db-1f2e3d6b9dd2-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.338561 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n72cp\" (UniqueName: \"kubernetes.io/projected/a295812d-d53c-4097-86db-1f2e3d6b9dd2-kube-api-access-n72cp\") on node \"crc\" DevicePath \"\"" Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.362931 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a295812d-d53c-4097-86db-1f2e3d6b9dd2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a295812d-d53c-4097-86db-1f2e3d6b9dd2" (UID: "a295812d-d53c-4097-86db-1f2e3d6b9dd2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.440884 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a295812d-d53c-4097-86db-1f2e3d6b9dd2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.700724 4799 generic.go:334] "Generic (PLEG): container finished" podID="a295812d-d53c-4097-86db-1f2e3d6b9dd2" containerID="d2818e0caccc03508ee21a208411e2517d06f0783b661b4d61949bdba9fad6d0" exitCode=0 Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.700769 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lf92c" event={"ID":"a295812d-d53c-4097-86db-1f2e3d6b9dd2","Type":"ContainerDied","Data":"d2818e0caccc03508ee21a208411e2517d06f0783b661b4d61949bdba9fad6d0"} Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.700796 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lf92c" event={"ID":"a295812d-d53c-4097-86db-1f2e3d6b9dd2","Type":"ContainerDied","Data":"1a7cefd94882884e4f71d30349c200c6708bc02143e520220fbeb680a2065000"} Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.700808 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lf92c" Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.700830 4799 scope.go:117] "RemoveContainer" containerID="d2818e0caccc03508ee21a208411e2517d06f0783b661b4d61949bdba9fad6d0" Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.725932 4799 scope.go:117] "RemoveContainer" containerID="fbf12a6f65ca6a67e2406657de1940c27083aa36aec2ffd634254017ae739c4a" Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.756267 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lf92c"] Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.768583 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lf92c"] Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.780229 4799 scope.go:117] "RemoveContainer" containerID="27c583999260990ecae81c6d0ccf2f7fba28e25d5e2d663a526d6c5f4d54c7ff" Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.823322 4799 scope.go:117] "RemoveContainer" containerID="d2818e0caccc03508ee21a208411e2517d06f0783b661b4d61949bdba9fad6d0" Jan 21 17:57:51 crc kubenswrapper[4799]: E0121 17:57:51.824005 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2818e0caccc03508ee21a208411e2517d06f0783b661b4d61949bdba9fad6d0\": container with ID starting with d2818e0caccc03508ee21a208411e2517d06f0783b661b4d61949bdba9fad6d0 not found: ID does not exist" containerID="d2818e0caccc03508ee21a208411e2517d06f0783b661b4d61949bdba9fad6d0" Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.824057 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2818e0caccc03508ee21a208411e2517d06f0783b661b4d61949bdba9fad6d0"} err="failed to get container status \"d2818e0caccc03508ee21a208411e2517d06f0783b661b4d61949bdba9fad6d0\": rpc error: code = NotFound desc = could not find container \"d2818e0caccc03508ee21a208411e2517d06f0783b661b4d61949bdba9fad6d0\": container with ID starting with d2818e0caccc03508ee21a208411e2517d06f0783b661b4d61949bdba9fad6d0 not found: ID does not exist" Jan 21 17:57:51 crc 
kubenswrapper[4799]: I0121 17:57:51.824092 4799 scope.go:117] "RemoveContainer" containerID="fbf12a6f65ca6a67e2406657de1940c27083aa36aec2ffd634254017ae739c4a" Jan 21 17:57:51 crc kubenswrapper[4799]: E0121 17:57:51.824770 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbf12a6f65ca6a67e2406657de1940c27083aa36aec2ffd634254017ae739c4a\": container with ID starting with fbf12a6f65ca6a67e2406657de1940c27083aa36aec2ffd634254017ae739c4a not found: ID does not exist" containerID="fbf12a6f65ca6a67e2406657de1940c27083aa36aec2ffd634254017ae739c4a" Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.824833 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbf12a6f65ca6a67e2406657de1940c27083aa36aec2ffd634254017ae739c4a"} err="failed to get container status \"fbf12a6f65ca6a67e2406657de1940c27083aa36aec2ffd634254017ae739c4a\": rpc error: code = NotFound desc = could not find container \"fbf12a6f65ca6a67e2406657de1940c27083aa36aec2ffd634254017ae739c4a\": container with ID starting with fbf12a6f65ca6a67e2406657de1940c27083aa36aec2ffd634254017ae739c4a not found: ID does not exist" Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.824872 4799 scope.go:117] "RemoveContainer" containerID="27c583999260990ecae81c6d0ccf2f7fba28e25d5e2d663a526d6c5f4d54c7ff" Jan 21 17:57:51 crc kubenswrapper[4799]: E0121 17:57:51.826881 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27c583999260990ecae81c6d0ccf2f7fba28e25d5e2d663a526d6c5f4d54c7ff\": container with ID starting with 27c583999260990ecae81c6d0ccf2f7fba28e25d5e2d663a526d6c5f4d54c7ff not found: ID does not exist" containerID="27c583999260990ecae81c6d0ccf2f7fba28e25d5e2d663a526d6c5f4d54c7ff" Jan 21 17:57:51 crc kubenswrapper[4799]: I0121 17:57:51.826913 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27c583999260990ecae81c6d0ccf2f7fba28e25d5e2d663a526d6c5f4d54c7ff"} err="failed to get container status \"27c583999260990ecae81c6d0ccf2f7fba28e25d5e2d663a526d6c5f4d54c7ff\": rpc error: code = NotFound desc = could not find container \"27c583999260990ecae81c6d0ccf2f7fba28e25d5e2d663a526d6c5f4d54c7ff\": container with ID starting with 27c583999260990ecae81c6d0ccf2f7fba28e25d5e2d663a526d6c5f4d54c7ff not found: ID does not exist" Jan 21 17:57:52 crc kubenswrapper[4799]: I0121 17:57:52.222745 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a295812d-d53c-4097-86db-1f2e3d6b9dd2" path="/var/lib/kubelet/pods/a295812d-d53c-4097-86db-1f2e3d6b9dd2/volumes" Jan 21 17:57:55 crc kubenswrapper[4799]: I0121 17:57:55.971116 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 17:57:55 crc kubenswrapper[4799]: I0121 17:57:55.971518 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 17:57:55 crc kubenswrapper[4799]: I0121 17:57:55.971596 4799 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 17:57:55 crc kubenswrapper[4799]: I0121 17:57:55.972600 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1b8c7c0681d7bc6849b2f8af2f982319bd30fd0c75b4bf458a9fe26a72616dd0"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 17:57:55 crc kubenswrapper[4799]: I0121 17:57:55.972691 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://1b8c7c0681d7bc6849b2f8af2f982319bd30fd0c75b4bf458a9fe26a72616dd0" gracePeriod=600 Jan 21 17:57:56 crc kubenswrapper[4799]: I0121 17:57:56.761441 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="1b8c7c0681d7bc6849b2f8af2f982319bd30fd0c75b4bf458a9fe26a72616dd0" exitCode=0 Jan 21 17:57:56 crc kubenswrapper[4799]: I0121 17:57:56.761493 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"1b8c7c0681d7bc6849b2f8af2f982319bd30fd0c75b4bf458a9fe26a72616dd0"} Jan 21 17:57:56 crc kubenswrapper[4799]: I0121 17:57:56.761961 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861"} Jan 21 17:57:56 crc kubenswrapper[4799]: I0121 17:57:56.761988 4799 scope.go:117] "RemoveContainer" containerID="5fdd831026afa966e0f760fecb7476b95aadfcd525b00468c8c89ce1d2df0632" Jan 21 17:58:15 crc kubenswrapper[4799]: I0121 17:58:15.634352 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/swift-proxy-749b6794b5-k8rw7" podUID="f7542699-9beb-4966-b1e4-b3c3cb9b42ff" containerName="proxy-server" probeResult="failure" output="HTTP probe failed with statuscode: 502" Jan 21 17:58:36 crc kubenswrapper[4799]: I0121 17:58:36.530876 4799 scope.go:117] "RemoveContainer" containerID="8f23bc960d06b776a628754f163280218abe7c353c40a867d70e231f9308d1ec" Jan 21 17:58:36 crc kubenswrapper[4799]: I0121 17:58:36.578490 4799 scope.go:117] "RemoveContainer" containerID="3a209a83cb06172c4f8de2be0e4ba44d2dbbc26a8af1dd62cab91b68e57db928" Jan 21 17:58:36 crc kubenswrapper[4799]: I0121 17:58:36.661246 4799 scope.go:117] "RemoveContainer" containerID="ab1356f09185e5bcd03fc2bf91986077d64da5b25ade94ba4110f03988ec963f" Jan 21 17:58:36 crc kubenswrapper[4799]: I0121 17:58:36.702561 4799 scope.go:117] "RemoveContainer" containerID="11f21f7e5deaa70ee0d77740377532b2349b1405cb6eb0d5c203aacc4806a2a7" Jan 21 17:58:36 crc kubenswrapper[4799]: I0121 17:58:36.725788 4799 scope.go:117] "RemoveContainer" containerID="e331da72beab4e6eb2351cf8e7e1bf76b4b5b46cb290cf8f998552993f9545c4" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.104349 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-szcgd"] Jan 21 17:59:04 crc kubenswrapper[4799]: E0121 17:59:04.105490 4799 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="a295812d-d53c-4097-86db-1f2e3d6b9dd2" containerName="extract-content" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.105509 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a295812d-d53c-4097-86db-1f2e3d6b9dd2" containerName="extract-content" Jan 21 17:59:04 crc kubenswrapper[4799]: E0121 17:59:04.105536 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a295812d-d53c-4097-86db-1f2e3d6b9dd2" containerName="extract-utilities" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.105547 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a295812d-d53c-4097-86db-1f2e3d6b9dd2" containerName="extract-utilities" Jan 21 17:59:04 crc kubenswrapper[4799]: E0121 17:59:04.105581 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a295812d-d53c-4097-86db-1f2e3d6b9dd2" containerName="registry-server" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.105658 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a295812d-d53c-4097-86db-1f2e3d6b9dd2" containerName="registry-server" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.106038 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a295812d-d53c-4097-86db-1f2e3d6b9dd2" containerName="registry-server" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.107954 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.130227 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-szcgd"] Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.285878 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85sx9\" (UniqueName: \"kubernetes.io/projected/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-kube-api-access-85sx9\") pod \"certified-operators-szcgd\" (UID: \"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e\") " pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.285976 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-catalog-content\") pod \"certified-operators-szcgd\" (UID: \"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e\") " pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.286051 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-utilities\") pod \"certified-operators-szcgd\" (UID: \"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e\") " pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.388677 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85sx9\" (UniqueName: \"kubernetes.io/projected/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-kube-api-access-85sx9\") pod \"certified-operators-szcgd\" (UID: \"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e\") " pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.388774 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-catalog-content\") 
pod \"certified-operators-szcgd\" (UID: \"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e\") " pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.388861 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-utilities\") pod \"certified-operators-szcgd\" (UID: \"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e\") " pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.389718 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-utilities\") pod \"certified-operators-szcgd\" (UID: \"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e\") " pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.389719 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-catalog-content\") pod \"certified-operators-szcgd\" (UID: \"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e\") " pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.415743 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85sx9\" (UniqueName: \"kubernetes.io/projected/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-kube-api-access-85sx9\") pod \"certified-operators-szcgd\" (UID: \"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e\") " pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.431830 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:04 crc kubenswrapper[4799]: I0121 17:59:04.989413 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-szcgd"] Jan 21 17:59:04 crc kubenswrapper[4799]: W0121 17:59:04.990795 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ac7e4c3_6f65_4d28_bee6_11aca85e1b0e.slice/crio-e04e5b28e015e1b7cca7e77c1ffad231c6af3279aea9835677e4d9eb6943b168 WatchSource:0}: Error finding container e04e5b28e015e1b7cca7e77c1ffad231c6af3279aea9835677e4d9eb6943b168: Status 404 returned error can't find the container with id e04e5b28e015e1b7cca7e77c1ffad231c6af3279aea9835677e4d9eb6943b168 Jan 21 17:59:05 crc kubenswrapper[4799]: I0121 17:59:05.701518 4799 generic.go:334] "Generic (PLEG): container finished" podID="5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e" containerID="b743f19ba39ede7d113b238f3d6a52f6d417d6dc5cc043c6f29b001ea729be12" exitCode=0 Jan 21 17:59:05 crc kubenswrapper[4799]: I0121 17:59:05.701990 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-szcgd" event={"ID":"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e","Type":"ContainerDied","Data":"b743f19ba39ede7d113b238f3d6a52f6d417d6dc5cc043c6f29b001ea729be12"} Jan 21 17:59:05 crc kubenswrapper[4799]: I0121 17:59:05.702030 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-szcgd" event={"ID":"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e","Type":"ContainerStarted","Data":"e04e5b28e015e1b7cca7e77c1ffad231c6af3279aea9835677e4d9eb6943b168"} Jan 21 17:59:05 crc kubenswrapper[4799]: I0121 17:59:05.703565 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 17:59:07 crc kubenswrapper[4799]: I0121 17:59:07.721785 4799 generic.go:334] "Generic (PLEG): container finished" podID="5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e" containerID="48f64a18d907a2c3aba9f25d85c901dfbafc398b5abc61e3393904edff9d3039" exitCode=0 Jan 21 17:59:07 crc kubenswrapper[4799]: I0121 17:59:07.721888 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-szcgd" event={"ID":"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e","Type":"ContainerDied","Data":"48f64a18d907a2c3aba9f25d85c901dfbafc398b5abc61e3393904edff9d3039"} Jan 21 17:59:08 crc kubenswrapper[4799]: I0121 17:59:08.737122 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-szcgd" event={"ID":"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e","Type":"ContainerStarted","Data":"3f121966454a0ca8f0c9204141a153e3361b936dab2a86889835b6becc2c36fa"} Jan 21 17:59:08 crc kubenswrapper[4799]: I0121 17:59:08.765583 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-szcgd" podStartSLOduration=2.366289959 podStartE2EDuration="4.765556097s" podCreationTimestamp="2026-01-21 17:59:04 +0000 UTC" firstStartedPulling="2026-01-21 17:59:05.703328336 +0000 UTC m=+1572.329618359" lastFinishedPulling="2026-01-21 17:59:08.102594464 +0000 UTC m=+1574.728884497" observedRunningTime="2026-01-21 17:59:08.757833764 +0000 UTC m=+1575.384123847" watchObservedRunningTime="2026-01-21 17:59:08.765556097 +0000 UTC m=+1575.391846130" Jan 21 17:59:14 crc kubenswrapper[4799]: I0121 17:59:14.432571 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:14 crc kubenswrapper[4799]: I0121 17:59:14.433428 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:14 crc kubenswrapper[4799]: I0121 17:59:14.489010 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:14 crc kubenswrapper[4799]: I0121 17:59:14.853044 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:14 crc kubenswrapper[4799]: I0121 17:59:14.938462 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-szcgd"] Jan 21 17:59:16 crc kubenswrapper[4799]: I0121 17:59:16.823259 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-szcgd" podUID="5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e" containerName="registry-server" containerID="cri-o://3f121966454a0ca8f0c9204141a153e3361b936dab2a86889835b6becc2c36fa" gracePeriod=2 Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.327734 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.424101 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-catalog-content\") pod \"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e\" (UID: \"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e\") " Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.424305 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-utilities\") pod \"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e\" (UID: \"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e\") " Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.424378 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85sx9\" (UniqueName: \"kubernetes.io/projected/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-kube-api-access-85sx9\") pod \"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e\" (UID: \"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e\") " Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.425235 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-utilities" (OuterVolumeSpecName: "utilities") pod "5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e" (UID: "5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.430693 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-kube-api-access-85sx9" (OuterVolumeSpecName: "kube-api-access-85sx9") pod "5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e" (UID: "5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e"). InnerVolumeSpecName "kube-api-access-85sx9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.468442 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e" (UID: "5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.527404 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.527460 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.527479 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85sx9\" (UniqueName: \"kubernetes.io/projected/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e-kube-api-access-85sx9\") on node \"crc\" DevicePath \"\"" Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.835984 4799 generic.go:334] "Generic (PLEG): container finished" podID="5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e" containerID="3f121966454a0ca8f0c9204141a153e3361b936dab2a86889835b6becc2c36fa" exitCode=0 Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.836048 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-szcgd" event={"ID":"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e","Type":"ContainerDied","Data":"3f121966454a0ca8f0c9204141a153e3361b936dab2a86889835b6becc2c36fa"} Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.836090 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-szcgd" Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.836109 4799 scope.go:117] "RemoveContainer" containerID="3f121966454a0ca8f0c9204141a153e3361b936dab2a86889835b6becc2c36fa" Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.836096 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-szcgd" event={"ID":"5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e","Type":"ContainerDied","Data":"e04e5b28e015e1b7cca7e77c1ffad231c6af3279aea9835677e4d9eb6943b168"} Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.935929 4799 scope.go:117] "RemoveContainer" containerID="48f64a18d907a2c3aba9f25d85c901dfbafc398b5abc61e3393904edff9d3039" Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.960510 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-szcgd"] Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.969469 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-szcgd"] Jan 21 17:59:17 crc kubenswrapper[4799]: I0121 17:59:17.977383 4799 scope.go:117] "RemoveContainer" containerID="b743f19ba39ede7d113b238f3d6a52f6d417d6dc5cc043c6f29b001ea729be12" Jan 21 17:59:18 crc kubenswrapper[4799]: I0121 17:59:18.012040 4799 scope.go:117] "RemoveContainer" containerID="3f121966454a0ca8f0c9204141a153e3361b936dab2a86889835b6becc2c36fa" Jan 21 17:59:18 crc kubenswrapper[4799]: E0121 17:59:18.012504 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f121966454a0ca8f0c9204141a153e3361b936dab2a86889835b6becc2c36fa\": container with ID starting with 3f121966454a0ca8f0c9204141a153e3361b936dab2a86889835b6becc2c36fa not found: ID does not exist" containerID="3f121966454a0ca8f0c9204141a153e3361b936dab2a86889835b6becc2c36fa" Jan 21 17:59:18 crc kubenswrapper[4799]: I0121 17:59:18.012536 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f121966454a0ca8f0c9204141a153e3361b936dab2a86889835b6becc2c36fa"} err="failed to get container status \"3f121966454a0ca8f0c9204141a153e3361b936dab2a86889835b6becc2c36fa\": rpc error: code = NotFound desc = could not find container \"3f121966454a0ca8f0c9204141a153e3361b936dab2a86889835b6becc2c36fa\": container with ID starting with 3f121966454a0ca8f0c9204141a153e3361b936dab2a86889835b6becc2c36fa not found: ID does not exist" Jan 21 17:59:18 crc kubenswrapper[4799]: I0121 17:59:18.012561 4799 scope.go:117] "RemoveContainer" containerID="48f64a18d907a2c3aba9f25d85c901dfbafc398b5abc61e3393904edff9d3039" Jan 21 17:59:18 crc kubenswrapper[4799]: E0121 17:59:18.012968 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48f64a18d907a2c3aba9f25d85c901dfbafc398b5abc61e3393904edff9d3039\": container with ID starting with 48f64a18d907a2c3aba9f25d85c901dfbafc398b5abc61e3393904edff9d3039 not found: ID does not exist" containerID="48f64a18d907a2c3aba9f25d85c901dfbafc398b5abc61e3393904edff9d3039" Jan 21 17:59:18 crc kubenswrapper[4799]: I0121 17:59:18.013030 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48f64a18d907a2c3aba9f25d85c901dfbafc398b5abc61e3393904edff9d3039"} err="failed to get container status \"48f64a18d907a2c3aba9f25d85c901dfbafc398b5abc61e3393904edff9d3039\": rpc error: code = NotFound desc = could not find 
container \"48f64a18d907a2c3aba9f25d85c901dfbafc398b5abc61e3393904edff9d3039\": container with ID starting with 48f64a18d907a2c3aba9f25d85c901dfbafc398b5abc61e3393904edff9d3039 not found: ID does not exist" Jan 21 17:59:18 crc kubenswrapper[4799]: I0121 17:59:18.013044 4799 scope.go:117] "RemoveContainer" containerID="b743f19ba39ede7d113b238f3d6a52f6d417d6dc5cc043c6f29b001ea729be12" Jan 21 17:59:18 crc kubenswrapper[4799]: E0121 17:59:18.013548 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b743f19ba39ede7d113b238f3d6a52f6d417d6dc5cc043c6f29b001ea729be12\": container with ID starting with b743f19ba39ede7d113b238f3d6a52f6d417d6dc5cc043c6f29b001ea729be12 not found: ID does not exist" containerID="b743f19ba39ede7d113b238f3d6a52f6d417d6dc5cc043c6f29b001ea729be12" Jan 21 17:59:18 crc kubenswrapper[4799]: I0121 17:59:18.013573 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b743f19ba39ede7d113b238f3d6a52f6d417d6dc5cc043c6f29b001ea729be12"} err="failed to get container status \"b743f19ba39ede7d113b238f3d6a52f6d417d6dc5cc043c6f29b001ea729be12\": rpc error: code = NotFound desc = could not find container \"b743f19ba39ede7d113b238f3d6a52f6d417d6dc5cc043c6f29b001ea729be12\": container with ID starting with b743f19ba39ede7d113b238f3d6a52f6d417d6dc5cc043c6f29b001ea729be12 not found: ID does not exist" Jan 21 17:59:18 crc kubenswrapper[4799]: I0121 17:59:18.218843 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e" path="/var/lib/kubelet/pods/5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e/volumes" Jan 21 17:59:36 crc kubenswrapper[4799]: I0121 17:59:36.962476 4799 scope.go:117] "RemoveContainer" containerID="a6183f03da598d687e7dcc3db8033af4efa220d4e56c96b8925d84fc692ea091" Jan 21 17:59:36 crc kubenswrapper[4799]: I0121 17:59:36.995797 4799 scope.go:117] "RemoveContainer" containerID="b03bdbe200981c553eeefe9cf4d3dc8468454e2e651c397a5bb471b84255b29f" Jan 21 17:59:37 crc kubenswrapper[4799]: I0121 17:59:37.022211 4799 scope.go:117] "RemoveContainer" containerID="5db2fe2be2fee23676dafc35c5104e8f1d5377f39bc33ba233590f44fc108599" Jan 21 17:59:37 crc kubenswrapper[4799]: I0121 17:59:37.047660 4799 scope.go:117] "RemoveContainer" containerID="8926411eb04e63565c5b583071005e198c2fbad3bfe9f0d039f0483c2712051f" Jan 21 17:59:37 crc kubenswrapper[4799]: I0121 17:59:37.068424 4799 scope.go:117] "RemoveContainer" containerID="e83eb221c56ae4873a8c5f4a34e4e1eed2f47fbf1c0d1c3d3e407ec8783d04e0" Jan 21 17:59:37 crc kubenswrapper[4799]: I0121 17:59:37.090569 4799 scope.go:117] "RemoveContainer" containerID="23932414f560b8204d12801ee82b40fe3fbc4348408ab863b26c4af0fd32d52a" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.174290 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq"] Jan 21 18:00:00 crc kubenswrapper[4799]: E0121 18:00:00.175304 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e" containerName="extract-content" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.175319 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e" containerName="extract-content" Jan 21 18:00:00 crc kubenswrapper[4799]: E0121 18:00:00.175335 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e" 
containerName="extract-utilities" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.175341 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e" containerName="extract-utilities" Jan 21 18:00:00 crc kubenswrapper[4799]: E0121 18:00:00.175367 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e" containerName="registry-server" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.175373 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e" containerName="registry-server" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.175567 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ac7e4c3-6f65-4d28-bee6-11aca85e1b0e" containerName="registry-server" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.176475 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.179857 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.180562 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.186495 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq"] Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.326596 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlvlh\" (UniqueName: \"kubernetes.io/projected/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-kube-api-access-hlvlh\") pod \"collect-profiles-29483640-l9qbq\" (UID: \"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.326680 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-config-volume\") pod \"collect-profiles-29483640-l9qbq\" (UID: \"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.326763 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-secret-volume\") pod \"collect-profiles-29483640-l9qbq\" (UID: \"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.429170 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlvlh\" (UniqueName: \"kubernetes.io/projected/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-kube-api-access-hlvlh\") pod \"collect-profiles-29483640-l9qbq\" (UID: \"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.429446 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-volume\" (UniqueName: \"kubernetes.io/configmap/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-config-volume\") pod \"collect-profiles-29483640-l9qbq\" (UID: \"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.429513 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-secret-volume\") pod \"collect-profiles-29483640-l9qbq\" (UID: \"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.431453 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-config-volume\") pod \"collect-profiles-29483640-l9qbq\" (UID: \"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.441280 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-secret-volume\") pod \"collect-profiles-29483640-l9qbq\" (UID: \"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.451878 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlvlh\" (UniqueName: \"kubernetes.io/projected/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-kube-api-access-hlvlh\") pod \"collect-profiles-29483640-l9qbq\" (UID: \"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.505223 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq" Jan 21 18:00:00 crc kubenswrapper[4799]: I0121 18:00:00.974817 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq"] Jan 21 18:00:01 crc kubenswrapper[4799]: I0121 18:00:01.492515 4799 generic.go:334] "Generic (PLEG): container finished" podID="c82d9473-3cf4-4119-8ffe-9a2ab7bafd81" containerID="92697d8c761a2b0f5621f465332d14d8bdaded0b115501864ba638dffc4ffe9c" exitCode=0 Jan 21 18:00:01 crc kubenswrapper[4799]: I0121 18:00:01.492609 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq" event={"ID":"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81","Type":"ContainerDied","Data":"92697d8c761a2b0f5621f465332d14d8bdaded0b115501864ba638dffc4ffe9c"} Jan 21 18:00:01 crc kubenswrapper[4799]: I0121 18:00:01.492846 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq" event={"ID":"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81","Type":"ContainerStarted","Data":"01f6cb31b929fa1f6697800e1ca95f229dde0ff5e5baadb3e58e7227204a2c6e"} Jan 21 18:00:02 crc kubenswrapper[4799]: I0121 18:00:02.912063 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq" Jan 21 18:00:02 crc kubenswrapper[4799]: I0121 18:00:02.989490 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-secret-volume\") pod \"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81\" (UID: \"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81\") " Jan 21 18:00:02 crc kubenswrapper[4799]: I0121 18:00:02.989986 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlvlh\" (UniqueName: \"kubernetes.io/projected/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-kube-api-access-hlvlh\") pod \"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81\" (UID: \"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81\") " Jan 21 18:00:02 crc kubenswrapper[4799]: I0121 18:00:02.990065 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-config-volume\") pod \"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81\" (UID: \"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81\") " Jan 21 18:00:02 crc kubenswrapper[4799]: I0121 18:00:02.990711 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-config-volume" (OuterVolumeSpecName: "config-volume") pod "c82d9473-3cf4-4119-8ffe-9a2ab7bafd81" (UID: "c82d9473-3cf4-4119-8ffe-9a2ab7bafd81"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:00:02 crc kubenswrapper[4799]: I0121 18:00:02.991072 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-config-volume\") on node \"crc\" DevicePath \"\"" Jan 21 18:00:02 crc kubenswrapper[4799]: I0121 18:00:02.996845 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c82d9473-3cf4-4119-8ffe-9a2ab7bafd81" (UID: "c82d9473-3cf4-4119-8ffe-9a2ab7bafd81"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:00:03 crc kubenswrapper[4799]: I0121 18:00:03.003401 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-kube-api-access-hlvlh" (OuterVolumeSpecName: "kube-api-access-hlvlh") pod "c82d9473-3cf4-4119-8ffe-9a2ab7bafd81" (UID: "c82d9473-3cf4-4119-8ffe-9a2ab7bafd81"). InnerVolumeSpecName "kube-api-access-hlvlh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:00:03 crc kubenswrapper[4799]: I0121 18:00:03.093156 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 21 18:00:03 crc kubenswrapper[4799]: I0121 18:00:03.093212 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlvlh\" (UniqueName: \"kubernetes.io/projected/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81-kube-api-access-hlvlh\") on node \"crc\" DevicePath \"\"" Jan 21 18:00:03 crc kubenswrapper[4799]: I0121 18:00:03.521038 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq" event={"ID":"c82d9473-3cf4-4119-8ffe-9a2ab7bafd81","Type":"ContainerDied","Data":"01f6cb31b929fa1f6697800e1ca95f229dde0ff5e5baadb3e58e7227204a2c6e"} Jan 21 18:00:03 crc kubenswrapper[4799]: I0121 18:00:03.521117 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="01f6cb31b929fa1f6697800e1ca95f229dde0ff5e5baadb3e58e7227204a2c6e" Jan 21 18:00:03 crc kubenswrapper[4799]: I0121 18:00:03.521178 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq" Jan 21 18:00:25 crc kubenswrapper[4799]: I0121 18:00:25.970561 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:00:25 crc kubenswrapper[4799]: I0121 18:00:25.971101 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:00:35 crc kubenswrapper[4799]: I0121 18:00:35.597035 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-c78fg"] Jan 21 18:00:35 crc kubenswrapper[4799]: E0121 18:00:35.599329 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c82d9473-3cf4-4119-8ffe-9a2ab7bafd81" containerName="collect-profiles" Jan 21 18:00:35 crc kubenswrapper[4799]: I0121 18:00:35.599443 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c82d9473-3cf4-4119-8ffe-9a2ab7bafd81" containerName="collect-profiles" Jan 21 18:00:35 crc kubenswrapper[4799]: I0121 18:00:35.599842 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c82d9473-3cf4-4119-8ffe-9a2ab7bafd81" containerName="collect-profiles" Jan 21 18:00:35 crc kubenswrapper[4799]: I0121 18:00:35.601960 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:35 crc kubenswrapper[4799]: I0121 18:00:35.620275 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c78fg"] Jan 21 18:00:35 crc kubenswrapper[4799]: I0121 18:00:35.784736 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5f07422-440a-44be-9f28-f3b2ee04f29c-catalog-content\") pod \"community-operators-c78fg\" (UID: \"f5f07422-440a-44be-9f28-f3b2ee04f29c\") " pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:35 crc kubenswrapper[4799]: I0121 18:00:35.785177 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxllr\" (UniqueName: \"kubernetes.io/projected/f5f07422-440a-44be-9f28-f3b2ee04f29c-kube-api-access-mxllr\") pod \"community-operators-c78fg\" (UID: \"f5f07422-440a-44be-9f28-f3b2ee04f29c\") " pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:35 crc kubenswrapper[4799]: I0121 18:00:35.785304 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5f07422-440a-44be-9f28-f3b2ee04f29c-utilities\") pod \"community-operators-c78fg\" (UID: \"f5f07422-440a-44be-9f28-f3b2ee04f29c\") " pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:35 crc kubenswrapper[4799]: I0121 18:00:35.887872 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5f07422-440a-44be-9f28-f3b2ee04f29c-utilities\") pod \"community-operators-c78fg\" (UID: \"f5f07422-440a-44be-9f28-f3b2ee04f29c\") " pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:35 crc kubenswrapper[4799]: I0121 18:00:35.888275 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5f07422-440a-44be-9f28-f3b2ee04f29c-catalog-content\") pod \"community-operators-c78fg\" (UID: \"f5f07422-440a-44be-9f28-f3b2ee04f29c\") " pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:35 crc kubenswrapper[4799]: I0121 18:00:35.888615 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxllr\" (UniqueName: \"kubernetes.io/projected/f5f07422-440a-44be-9f28-f3b2ee04f29c-kube-api-access-mxllr\") pod \"community-operators-c78fg\" (UID: \"f5f07422-440a-44be-9f28-f3b2ee04f29c\") " pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:35 crc kubenswrapper[4799]: I0121 18:00:35.888813 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5f07422-440a-44be-9f28-f3b2ee04f29c-catalog-content\") pod \"community-operators-c78fg\" (UID: \"f5f07422-440a-44be-9f28-f3b2ee04f29c\") " pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:35 crc kubenswrapper[4799]: I0121 18:00:35.890213 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5f07422-440a-44be-9f28-f3b2ee04f29c-utilities\") pod \"community-operators-c78fg\" (UID: \"f5f07422-440a-44be-9f28-f3b2ee04f29c\") " pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:35 crc kubenswrapper[4799]: I0121 18:00:35.908970 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mxllr\" (UniqueName: \"kubernetes.io/projected/f5f07422-440a-44be-9f28-f3b2ee04f29c-kube-api-access-mxllr\") pod \"community-operators-c78fg\" (UID: \"f5f07422-440a-44be-9f28-f3b2ee04f29c\") " pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:35 crc kubenswrapper[4799]: I0121 18:00:35.966619 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:36 crc kubenswrapper[4799]: I0121 18:00:36.445179 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c78fg"] Jan 21 18:00:37 crc kubenswrapper[4799]: I0121 18:00:37.178832 4799 generic.go:334] "Generic (PLEG): container finished" podID="f5f07422-440a-44be-9f28-f3b2ee04f29c" containerID="25121ad7e1350f0ceab3700fb534bfde8b324fb558c6b3110c748595a91275ce" exitCode=0 Jan 21 18:00:37 crc kubenswrapper[4799]: I0121 18:00:37.179001 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c78fg" event={"ID":"f5f07422-440a-44be-9f28-f3b2ee04f29c","Type":"ContainerDied","Data":"25121ad7e1350f0ceab3700fb534bfde8b324fb558c6b3110c748595a91275ce"} Jan 21 18:00:37 crc kubenswrapper[4799]: I0121 18:00:37.180590 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c78fg" event={"ID":"f5f07422-440a-44be-9f28-f3b2ee04f29c","Type":"ContainerStarted","Data":"db9bb029f8fac995be6fe0dbe50a3736023abd93797e624af3a39d62ecc1f2e4"} Jan 21 18:00:38 crc kubenswrapper[4799]: I0121 18:00:38.241883 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c78fg" event={"ID":"f5f07422-440a-44be-9f28-f3b2ee04f29c","Type":"ContainerStarted","Data":"bc649c108cb848b06f6073f2f299932b9c672a9f353d339bcf385bbe83f97992"} Jan 21 18:00:39 crc kubenswrapper[4799]: I0121 18:00:39.245703 4799 generic.go:334] "Generic (PLEG): container finished" podID="f5f07422-440a-44be-9f28-f3b2ee04f29c" containerID="bc649c108cb848b06f6073f2f299932b9c672a9f353d339bcf385bbe83f97992" exitCode=0 Jan 21 18:00:39 crc kubenswrapper[4799]: I0121 18:00:39.246103 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c78fg" event={"ID":"f5f07422-440a-44be-9f28-f3b2ee04f29c","Type":"ContainerDied","Data":"bc649c108cb848b06f6073f2f299932b9c672a9f353d339bcf385bbe83f97992"} Jan 21 18:00:40 crc kubenswrapper[4799]: I0121 18:00:40.260959 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c78fg" event={"ID":"f5f07422-440a-44be-9f28-f3b2ee04f29c","Type":"ContainerStarted","Data":"db3f9467ff88c44b3ce7e0714ae64e3eb7143a0ed2bcc4c2348054bccb1b5f83"} Jan 21 18:00:40 crc kubenswrapper[4799]: I0121 18:00:40.303918 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-c78fg" podStartSLOduration=2.831750907 podStartE2EDuration="5.303892727s" podCreationTimestamp="2026-01-21 18:00:35 +0000 UTC" firstStartedPulling="2026-01-21 18:00:37.181820754 +0000 UTC m=+1663.808110777" lastFinishedPulling="2026-01-21 18:00:39.653962564 +0000 UTC m=+1666.280252597" observedRunningTime="2026-01-21 18:00:40.288984826 +0000 UTC m=+1666.915274859" watchObservedRunningTime="2026-01-21 18:00:40.303892727 +0000 UTC m=+1666.930182760" Jan 21 18:00:45 crc kubenswrapper[4799]: I0121 18:00:45.967584 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:45 crc kubenswrapper[4799]: I0121 18:00:45.968170 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:46 crc kubenswrapper[4799]: I0121 18:00:46.016946 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:46 crc kubenswrapper[4799]: I0121 18:00:46.371842 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:46 crc kubenswrapper[4799]: I0121 18:00:46.422822 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c78fg"] Jan 21 18:00:48 crc kubenswrapper[4799]: I0121 18:00:48.342936 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-c78fg" podUID="f5f07422-440a-44be-9f28-f3b2ee04f29c" containerName="registry-server" containerID="cri-o://db3f9467ff88c44b3ce7e0714ae64e3eb7143a0ed2bcc4c2348054bccb1b5f83" gracePeriod=2 Jan 21 18:00:48 crc kubenswrapper[4799]: I0121 18:00:48.866522 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:48 crc kubenswrapper[4799]: I0121 18:00:48.983955 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5f07422-440a-44be-9f28-f3b2ee04f29c-catalog-content\") pod \"f5f07422-440a-44be-9f28-f3b2ee04f29c\" (UID: \"f5f07422-440a-44be-9f28-f3b2ee04f29c\") " Jan 21 18:00:48 crc kubenswrapper[4799]: I0121 18:00:48.984288 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxllr\" (UniqueName: \"kubernetes.io/projected/f5f07422-440a-44be-9f28-f3b2ee04f29c-kube-api-access-mxllr\") pod \"f5f07422-440a-44be-9f28-f3b2ee04f29c\" (UID: \"f5f07422-440a-44be-9f28-f3b2ee04f29c\") " Jan 21 18:00:48 crc kubenswrapper[4799]: I0121 18:00:48.984520 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5f07422-440a-44be-9f28-f3b2ee04f29c-utilities\") pod \"f5f07422-440a-44be-9f28-f3b2ee04f29c\" (UID: \"f5f07422-440a-44be-9f28-f3b2ee04f29c\") " Jan 21 18:00:48 crc kubenswrapper[4799]: I0121 18:00:48.985966 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5f07422-440a-44be-9f28-f3b2ee04f29c-utilities" (OuterVolumeSpecName: "utilities") pod "f5f07422-440a-44be-9f28-f3b2ee04f29c" (UID: "f5f07422-440a-44be-9f28-f3b2ee04f29c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:00:48 crc kubenswrapper[4799]: I0121 18:00:48.991955 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5f07422-440a-44be-9f28-f3b2ee04f29c-kube-api-access-mxllr" (OuterVolumeSpecName: "kube-api-access-mxllr") pod "f5f07422-440a-44be-9f28-f3b2ee04f29c" (UID: "f5f07422-440a-44be-9f28-f3b2ee04f29c"). InnerVolumeSpecName "kube-api-access-mxllr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.044819 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5f07422-440a-44be-9f28-f3b2ee04f29c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f5f07422-440a-44be-9f28-f3b2ee04f29c" (UID: "f5f07422-440a-44be-9f28-f3b2ee04f29c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.086697 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5f07422-440a-44be-9f28-f3b2ee04f29c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.086728 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxllr\" (UniqueName: \"kubernetes.io/projected/f5f07422-440a-44be-9f28-f3b2ee04f29c-kube-api-access-mxllr\") on node \"crc\" DevicePath \"\"" Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.086740 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5f07422-440a-44be-9f28-f3b2ee04f29c-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.356020 4799 generic.go:334] "Generic (PLEG): container finished" podID="f5f07422-440a-44be-9f28-f3b2ee04f29c" containerID="db3f9467ff88c44b3ce7e0714ae64e3eb7143a0ed2bcc4c2348054bccb1b5f83" exitCode=0 Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.356319 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c78fg" event={"ID":"f5f07422-440a-44be-9f28-f3b2ee04f29c","Type":"ContainerDied","Data":"db3f9467ff88c44b3ce7e0714ae64e3eb7143a0ed2bcc4c2348054bccb1b5f83"} Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.356354 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c78fg" event={"ID":"f5f07422-440a-44be-9f28-f3b2ee04f29c","Type":"ContainerDied","Data":"db9bb029f8fac995be6fe0dbe50a3736023abd93797e624af3a39d62ecc1f2e4"} Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.356377 4799 scope.go:117] "RemoveContainer" containerID="db3f9467ff88c44b3ce7e0714ae64e3eb7143a0ed2bcc4c2348054bccb1b5f83" Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.356566 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c78fg" Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.390674 4799 scope.go:117] "RemoveContainer" containerID="bc649c108cb848b06f6073f2f299932b9c672a9f353d339bcf385bbe83f97992" Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.400389 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c78fg"] Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.415582 4799 scope.go:117] "RemoveContainer" containerID="25121ad7e1350f0ceab3700fb534bfde8b324fb558c6b3110c748595a91275ce" Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.417903 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-c78fg"] Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.475332 4799 scope.go:117] "RemoveContainer" containerID="db3f9467ff88c44b3ce7e0714ae64e3eb7143a0ed2bcc4c2348054bccb1b5f83" Jan 21 18:00:49 crc kubenswrapper[4799]: E0121 18:00:49.475756 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db3f9467ff88c44b3ce7e0714ae64e3eb7143a0ed2bcc4c2348054bccb1b5f83\": container with ID starting with db3f9467ff88c44b3ce7e0714ae64e3eb7143a0ed2bcc4c2348054bccb1b5f83 not found: ID does not exist" containerID="db3f9467ff88c44b3ce7e0714ae64e3eb7143a0ed2bcc4c2348054bccb1b5f83" Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.475802 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db3f9467ff88c44b3ce7e0714ae64e3eb7143a0ed2bcc4c2348054bccb1b5f83"} err="failed to get container status \"db3f9467ff88c44b3ce7e0714ae64e3eb7143a0ed2bcc4c2348054bccb1b5f83\": rpc error: code = NotFound desc = could not find container \"db3f9467ff88c44b3ce7e0714ae64e3eb7143a0ed2bcc4c2348054bccb1b5f83\": container with ID starting with db3f9467ff88c44b3ce7e0714ae64e3eb7143a0ed2bcc4c2348054bccb1b5f83 not found: ID does not exist" Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.475833 4799 scope.go:117] "RemoveContainer" containerID="bc649c108cb848b06f6073f2f299932b9c672a9f353d339bcf385bbe83f97992" Jan 21 18:00:49 crc kubenswrapper[4799]: E0121 18:00:49.476384 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc649c108cb848b06f6073f2f299932b9c672a9f353d339bcf385bbe83f97992\": container with ID starting with bc649c108cb848b06f6073f2f299932b9c672a9f353d339bcf385bbe83f97992 not found: ID does not exist" containerID="bc649c108cb848b06f6073f2f299932b9c672a9f353d339bcf385bbe83f97992" Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.476431 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc649c108cb848b06f6073f2f299932b9c672a9f353d339bcf385bbe83f97992"} err="failed to get container status \"bc649c108cb848b06f6073f2f299932b9c672a9f353d339bcf385bbe83f97992\": rpc error: code = NotFound desc = could not find container \"bc649c108cb848b06f6073f2f299932b9c672a9f353d339bcf385bbe83f97992\": container with ID starting with bc649c108cb848b06f6073f2f299932b9c672a9f353d339bcf385bbe83f97992 not found: ID does not exist" Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.476467 4799 scope.go:117] "RemoveContainer" containerID="25121ad7e1350f0ceab3700fb534bfde8b324fb558c6b3110c748595a91275ce" Jan 21 18:00:49 crc kubenswrapper[4799]: E0121 18:00:49.476677 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"25121ad7e1350f0ceab3700fb534bfde8b324fb558c6b3110c748595a91275ce\": container with ID starting with 25121ad7e1350f0ceab3700fb534bfde8b324fb558c6b3110c748595a91275ce not found: ID does not exist" containerID="25121ad7e1350f0ceab3700fb534bfde8b324fb558c6b3110c748595a91275ce" Jan 21 18:00:49 crc kubenswrapper[4799]: I0121 18:00:49.476699 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25121ad7e1350f0ceab3700fb534bfde8b324fb558c6b3110c748595a91275ce"} err="failed to get container status \"25121ad7e1350f0ceab3700fb534bfde8b324fb558c6b3110c748595a91275ce\": rpc error: code = NotFound desc = could not find container \"25121ad7e1350f0ceab3700fb534bfde8b324fb558c6b3110c748595a91275ce\": container with ID starting with 25121ad7e1350f0ceab3700fb534bfde8b324fb558c6b3110c748595a91275ce not found: ID does not exist" Jan 21 18:00:50 crc kubenswrapper[4799]: I0121 18:00:50.217953 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5f07422-440a-44be-9f28-f3b2ee04f29c" path="/var/lib/kubelet/pods/f5f07422-440a-44be-9f28-f3b2ee04f29c/volumes" Jan 21 18:00:55 crc kubenswrapper[4799]: I0121 18:00:55.971298 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:00:55 crc kubenswrapper[4799]: I0121 18:00:55.971903 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:00:59 crc kubenswrapper[4799]: I0121 18:00:59.064675 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-6xr5d"] Jan 21 18:00:59 crc kubenswrapper[4799]: I0121 18:00:59.079371 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-6xr5d"] Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.041459 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-83af-account-create-update-tz6l9"] Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.064087 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-4a30-account-create-update-wqr8v"] Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.078709 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-5362-account-create-update-9xw4s"] Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.087592 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-83af-account-create-update-tz6l9"] Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.095811 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-4a30-account-create-update-wqr8v"] Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.108245 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-5362-account-create-update-9xw4s"] Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.146549 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29483641-hhgjf"] Jan 21 18:01:00 crc kubenswrapper[4799]: E0121 18:01:00.147425 4799 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5f07422-440a-44be-9f28-f3b2ee04f29c" containerName="extract-content" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.147507 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5f07422-440a-44be-9f28-f3b2ee04f29c" containerName="extract-content" Jan 21 18:01:00 crc kubenswrapper[4799]: E0121 18:01:00.147586 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5f07422-440a-44be-9f28-f3b2ee04f29c" containerName="extract-utilities" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.147640 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5f07422-440a-44be-9f28-f3b2ee04f29c" containerName="extract-utilities" Jan 21 18:01:00 crc kubenswrapper[4799]: E0121 18:01:00.147706 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5f07422-440a-44be-9f28-f3b2ee04f29c" containerName="registry-server" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.147761 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5f07422-440a-44be-9f28-f3b2ee04f29c" containerName="registry-server" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.148116 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5f07422-440a-44be-9f28-f3b2ee04f29c" containerName="registry-server" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.148951 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29483641-hhgjf" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.157445 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29483641-hhgjf"] Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.217010 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="082ee676-0c0e-48fc-a537-aac7e95dd4ae" path="/var/lib/kubelet/pods/082ee676-0c0e-48fc-a537-aac7e95dd4ae/volumes" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.217983 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39b6f6dd-e24e-4398-87d7-0fc790374a12" path="/var/lib/kubelet/pods/39b6f6dd-e24e-4398-87d7-0fc790374a12/volumes" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.218607 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="747c974f-6219-4bb3-a6d0-e657bd201d5d" path="/var/lib/kubelet/pods/747c974f-6219-4bb3-a6d0-e657bd201d5d/volumes" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.219343 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d517139c-ff8f-4320-8901-06ff9955241c" path="/var/lib/kubelet/pods/d517139c-ff8f-4320-8901-06ff9955241c/volumes" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.250102 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-fernet-keys\") pod \"keystone-cron-29483641-hhgjf\" (UID: \"e4a59182-8711-4086-b753-a527b88f464b\") " pod="openstack/keystone-cron-29483641-hhgjf" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.250474 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-combined-ca-bundle\") pod \"keystone-cron-29483641-hhgjf\" (UID: \"e4a59182-8711-4086-b753-a527b88f464b\") " pod="openstack/keystone-cron-29483641-hhgjf" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.250719 
4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-config-data\") pod \"keystone-cron-29483641-hhgjf\" (UID: \"e4a59182-8711-4086-b753-a527b88f464b\") " pod="openstack/keystone-cron-29483641-hhgjf" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.250859 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cjvj\" (UniqueName: \"kubernetes.io/projected/e4a59182-8711-4086-b753-a527b88f464b-kube-api-access-6cjvj\") pod \"keystone-cron-29483641-hhgjf\" (UID: \"e4a59182-8711-4086-b753-a527b88f464b\") " pod="openstack/keystone-cron-29483641-hhgjf" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.352856 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-combined-ca-bundle\") pod \"keystone-cron-29483641-hhgjf\" (UID: \"e4a59182-8711-4086-b753-a527b88f464b\") " pod="openstack/keystone-cron-29483641-hhgjf" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.352938 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-config-data\") pod \"keystone-cron-29483641-hhgjf\" (UID: \"e4a59182-8711-4086-b753-a527b88f464b\") " pod="openstack/keystone-cron-29483641-hhgjf" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.352978 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cjvj\" (UniqueName: \"kubernetes.io/projected/e4a59182-8711-4086-b753-a527b88f464b-kube-api-access-6cjvj\") pod \"keystone-cron-29483641-hhgjf\" (UID: \"e4a59182-8711-4086-b753-a527b88f464b\") " pod="openstack/keystone-cron-29483641-hhgjf" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.353072 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-fernet-keys\") pod \"keystone-cron-29483641-hhgjf\" (UID: \"e4a59182-8711-4086-b753-a527b88f464b\") " pod="openstack/keystone-cron-29483641-hhgjf" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.360162 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-config-data\") pod \"keystone-cron-29483641-hhgjf\" (UID: \"e4a59182-8711-4086-b753-a527b88f464b\") " pod="openstack/keystone-cron-29483641-hhgjf" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.360291 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-combined-ca-bundle\") pod \"keystone-cron-29483641-hhgjf\" (UID: \"e4a59182-8711-4086-b753-a527b88f464b\") " pod="openstack/keystone-cron-29483641-hhgjf" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.361180 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-fernet-keys\") pod \"keystone-cron-29483641-hhgjf\" (UID: \"e4a59182-8711-4086-b753-a527b88f464b\") " pod="openstack/keystone-cron-29483641-hhgjf" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.375799 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6cjvj\" (UniqueName: \"kubernetes.io/projected/e4a59182-8711-4086-b753-a527b88f464b-kube-api-access-6cjvj\") pod \"keystone-cron-29483641-hhgjf\" (UID: \"e4a59182-8711-4086-b753-a527b88f464b\") " pod="openstack/keystone-cron-29483641-hhgjf" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.493276 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29483641-hhgjf" Jan 21 18:01:00 crc kubenswrapper[4799]: I0121 18:01:00.983252 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29483641-hhgjf"] Jan 21 18:01:01 crc kubenswrapper[4799]: I0121 18:01:01.518819 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29483641-hhgjf" event={"ID":"e4a59182-8711-4086-b753-a527b88f464b","Type":"ContainerStarted","Data":"b77fb1ca5463bc6e3a61355a03b39a19cbf14e4c9473cc36b7471572969db0a8"} Jan 21 18:01:01 crc kubenswrapper[4799]: I0121 18:01:01.519192 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29483641-hhgjf" event={"ID":"e4a59182-8711-4086-b753-a527b88f464b","Type":"ContainerStarted","Data":"25ffe62f3d3e02faf8b8fece3a2c8003c93bf4bb13f379cb70ea48b7e56a91ed"} Jan 21 18:01:01 crc kubenswrapper[4799]: I0121 18:01:01.544432 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29483641-hhgjf" podStartSLOduration=1.544378391 podStartE2EDuration="1.544378391s" podCreationTimestamp="2026-01-21 18:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:01:01.537425079 +0000 UTC m=+1688.163715102" watchObservedRunningTime="2026-01-21 18:01:01.544378391 +0000 UTC m=+1688.170668424" Jan 21 18:01:04 crc kubenswrapper[4799]: I0121 18:01:04.035776 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-rg5tc"] Jan 21 18:01:04 crc kubenswrapper[4799]: I0121 18:01:04.050037 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-rg5tc"] Jan 21 18:01:04 crc kubenswrapper[4799]: I0121 18:01:04.065059 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-create-25rbc"] Jan 21 18:01:04 crc kubenswrapper[4799]: I0121 18:01:04.074070 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-create-25rbc"] Jan 21 18:01:04 crc kubenswrapper[4799]: I0121 18:01:04.235698 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="693423df-cd0b-4d1e-a58d-ec5f062db23d" path="/var/lib/kubelet/pods/693423df-cd0b-4d1e-a58d-ec5f062db23d/volumes" Jan 21 18:01:04 crc kubenswrapper[4799]: I0121 18:01:04.236800 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc6f84a8-5008-4534-b894-1caa2f8585da" path="/var/lib/kubelet/pods/dc6f84a8-5008-4534-b894-1caa2f8585da/volumes" Jan 21 18:01:04 crc kubenswrapper[4799]: I0121 18:01:04.561398 4799 generic.go:334] "Generic (PLEG): container finished" podID="e4a59182-8711-4086-b753-a527b88f464b" containerID="b77fb1ca5463bc6e3a61355a03b39a19cbf14e4c9473cc36b7471572969db0a8" exitCode=0 Jan 21 18:01:04 crc kubenswrapper[4799]: I0121 18:01:04.561470 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29483641-hhgjf" event={"ID":"e4a59182-8711-4086-b753-a527b88f464b","Type":"ContainerDied","Data":"b77fb1ca5463bc6e3a61355a03b39a19cbf14e4c9473cc36b7471572969db0a8"} 
Jan 21 18:01:04 crc kubenswrapper[4799]: I0121 18:01:04.564175 4799 generic.go:334] "Generic (PLEG): container finished" podID="7f2d9e34-479a-44ae-b64e-55baf5645dfc" containerID="8e0def670c763515c6a258ea671e54fb0586c38bf3b0e5dc67858c403e01a414" exitCode=0 Jan 21 18:01:04 crc kubenswrapper[4799]: I0121 18:01:04.564231 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" event={"ID":"7f2d9e34-479a-44ae-b64e-55baf5645dfc","Type":"ContainerDied","Data":"8e0def670c763515c6a258ea671e54fb0586c38bf3b0e5dc67858c403e01a414"} Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.014407 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29483641-hhgjf" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.104980 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-combined-ca-bundle\") pod \"e4a59182-8711-4086-b753-a527b88f464b\" (UID: \"e4a59182-8711-4086-b753-a527b88f464b\") " Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.105381 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-config-data\") pod \"e4a59182-8711-4086-b753-a527b88f464b\" (UID: \"e4a59182-8711-4086-b753-a527b88f464b\") " Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.105494 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cjvj\" (UniqueName: \"kubernetes.io/projected/e4a59182-8711-4086-b753-a527b88f464b-kube-api-access-6cjvj\") pod \"e4a59182-8711-4086-b753-a527b88f464b\" (UID: \"e4a59182-8711-4086-b753-a527b88f464b\") " Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.105608 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-fernet-keys\") pod \"e4a59182-8711-4086-b753-a527b88f464b\" (UID: \"e4a59182-8711-4086-b753-a527b88f464b\") " Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.125685 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e4a59182-8711-4086-b753-a527b88f464b" (UID: "e4a59182-8711-4086-b753-a527b88f464b"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.136645 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4a59182-8711-4086-b753-a527b88f464b-kube-api-access-6cjvj" (OuterVolumeSpecName: "kube-api-access-6cjvj") pod "e4a59182-8711-4086-b753-a527b88f464b" (UID: "e4a59182-8711-4086-b753-a527b88f464b"). InnerVolumeSpecName "kube-api-access-6cjvj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.209407 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cjvj\" (UniqueName: \"kubernetes.io/projected/e4a59182-8711-4086-b753-a527b88f464b-kube-api-access-6cjvj\") on node \"crc\" DevicePath \"\"" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.209896 4799 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.209419 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.213342 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-config-data" (OuterVolumeSpecName: "config-data") pod "e4a59182-8711-4086-b753-a527b88f464b" (UID: "e4a59182-8711-4086-b753-a527b88f464b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.242931 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e4a59182-8711-4086-b753-a527b88f464b" (UID: "e4a59182-8711-4086-b753-a527b88f464b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.310914 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-bootstrap-combined-ca-bundle\") pod \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\" (UID: \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\") " Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.311006 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-ssh-key-openstack-edpm-ipam\") pod \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\" (UID: \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\") " Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.311042 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bff5\" (UniqueName: \"kubernetes.io/projected/7f2d9e34-479a-44ae-b64e-55baf5645dfc-kube-api-access-4bff5\") pod \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\" (UID: \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\") " Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.311698 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-inventory\") pod \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\" (UID: \"7f2d9e34-479a-44ae-b64e-55baf5645dfc\") " Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.312231 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.312255 4799 reconciler_common.go:293] "Volume detached for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/e4a59182-8711-4086-b753-a527b88f464b-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.314385 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "7f2d9e34-479a-44ae-b64e-55baf5645dfc" (UID: "7f2d9e34-479a-44ae-b64e-55baf5645dfc"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.315909 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f2d9e34-479a-44ae-b64e-55baf5645dfc-kube-api-access-4bff5" (OuterVolumeSpecName: "kube-api-access-4bff5") pod "7f2d9e34-479a-44ae-b64e-55baf5645dfc" (UID: "7f2d9e34-479a-44ae-b64e-55baf5645dfc"). InnerVolumeSpecName "kube-api-access-4bff5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.346640 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-inventory" (OuterVolumeSpecName: "inventory") pod "7f2d9e34-479a-44ae-b64e-55baf5645dfc" (UID: "7f2d9e34-479a-44ae-b64e-55baf5645dfc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.348293 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "7f2d9e34-479a-44ae-b64e-55baf5645dfc" (UID: "7f2d9e34-479a-44ae-b64e-55baf5645dfc"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.413636 4799 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.413674 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.413684 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bff5\" (UniqueName: \"kubernetes.io/projected/7f2d9e34-479a-44ae-b64e-55baf5645dfc-kube-api-access-4bff5\") on node \"crc\" DevicePath \"\"" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.413693 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7f2d9e34-479a-44ae-b64e-55baf5645dfc-inventory\") on node \"crc\" DevicePath \"\"" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.587712 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29483641-hhgjf" event={"ID":"e4a59182-8711-4086-b753-a527b88f464b","Type":"ContainerDied","Data":"25ffe62f3d3e02faf8b8fece3a2c8003c93bf4bb13f379cb70ea48b7e56a91ed"} Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.587769 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25ffe62f3d3e02faf8b8fece3a2c8003c93bf4bb13f379cb70ea48b7e56a91ed" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.587834 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29483641-hhgjf" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.590811 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" event={"ID":"7f2d9e34-479a-44ae-b64e-55baf5645dfc","Type":"ContainerDied","Data":"7493f6ffff96b82bdf893b6022ab8a7c4c702dac8dba9ced7c9974a35a93f178"} Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.590871 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7493f6ffff96b82bdf893b6022ab8a7c4c702dac8dba9ced7c9974a35a93f178" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.590932 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.693697 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d"] Jan 21 18:01:06 crc kubenswrapper[4799]: E0121 18:01:06.694413 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f2d9e34-479a-44ae-b64e-55baf5645dfc" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.694447 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f2d9e34-479a-44ae-b64e-55baf5645dfc" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Jan 21 18:01:06 crc kubenswrapper[4799]: E0121 18:01:06.694490 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4a59182-8711-4086-b753-a527b88f464b" containerName="keystone-cron" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.694504 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4a59182-8711-4086-b753-a527b88f464b" containerName="keystone-cron" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.694777 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f2d9e34-479a-44ae-b64e-55baf5645dfc" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.694799 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4a59182-8711-4086-b753-a527b88f464b" containerName="keystone-cron" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.695779 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.699009 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.699342 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.705750 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p22hr" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.706112 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.706916 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d"] Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.722549 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d\" (UID: \"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.722871 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cjsm\" (UniqueName: \"kubernetes.io/projected/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-kube-api-access-6cjsm\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d\" (UID: 
\"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.723094 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d\" (UID: \"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.825093 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cjsm\" (UniqueName: \"kubernetes.io/projected/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-kube-api-access-6cjsm\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d\" (UID: \"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.825433 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d\" (UID: \"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.825550 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d\" (UID: \"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.829421 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d\" (UID: \"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.829556 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d\" (UID: \"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" Jan 21 18:01:06 crc kubenswrapper[4799]: I0121 18:01:06.841187 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cjsm\" (UniqueName: \"kubernetes.io/projected/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-kube-api-access-6cjsm\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d\" (UID: \"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" Jan 21 18:01:07 crc kubenswrapper[4799]: I0121 18:01:07.017038 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" Jan 21 18:01:07 crc kubenswrapper[4799]: I0121 18:01:07.766067 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d"] Jan 21 18:01:08 crc kubenswrapper[4799]: I0121 18:01:08.616469 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" event={"ID":"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7","Type":"ContainerStarted","Data":"bac3649792243f49950bbcc84ea14a5d2e543efb03fa095b730a4edb155b183f"} Jan 21 18:01:08 crc kubenswrapper[4799]: I0121 18:01:08.616834 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" event={"ID":"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7","Type":"ContainerStarted","Data":"7a18b497e32cf066c758d5e80e8963853852f9c88639fd861ee7c2bbd0162b25"} Jan 21 18:01:08 crc kubenswrapper[4799]: I0121 18:01:08.635525 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" podStartSLOduration=2.129318365 podStartE2EDuration="2.6355039s" podCreationTimestamp="2026-01-21 18:01:06 +0000 UTC" firstStartedPulling="2026-01-21 18:01:07.763857688 +0000 UTC m=+1694.390147711" lastFinishedPulling="2026-01-21 18:01:08.270043223 +0000 UTC m=+1694.896333246" observedRunningTime="2026-01-21 18:01:08.635041977 +0000 UTC m=+1695.261332000" watchObservedRunningTime="2026-01-21 18:01:08.6355039 +0000 UTC m=+1695.261793933" Jan 21 18:01:17 crc kubenswrapper[4799]: I0121 18:01:17.054400 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-9m9t5"] Jan 21 18:01:17 crc kubenswrapper[4799]: I0121 18:01:17.069787 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-9m9t5"] Jan 21 18:01:18 crc kubenswrapper[4799]: I0121 18:01:18.218032 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c28dcade-7a63-4e0c-988b-1aac42353632" path="/var/lib/kubelet/pods/c28dcade-7a63-4e0c-988b-1aac42353632/volumes" Jan 21 18:01:25 crc kubenswrapper[4799]: I0121 18:01:25.971597 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:01:25 crc kubenswrapper[4799]: I0121 18:01:25.972175 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:01:25 crc kubenswrapper[4799]: I0121 18:01:25.972241 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 18:01:25 crc kubenswrapper[4799]: I0121 18:01:25.973278 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container 
machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:01:25 crc kubenswrapper[4799]: I0121 18:01:25.973332 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" gracePeriod=600 Jan 21 18:01:26 crc kubenswrapper[4799]: I0121 18:01:26.030841 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-5f84r"] Jan 21 18:01:26 crc kubenswrapper[4799]: I0121 18:01:26.041227 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-665e-account-create-update-h8l8x"] Jan 21 18:01:26 crc kubenswrapper[4799]: I0121 18:01:26.051634 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-5f84r"] Jan 21 18:01:26 crc kubenswrapper[4799]: I0121 18:01:26.064271 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-665e-account-create-update-h8l8x"] Jan 21 18:01:26 crc kubenswrapper[4799]: E0121 18:01:26.095915 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:01:26 crc kubenswrapper[4799]: I0121 18:01:26.217383 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad2dcb68-2e9f-4104-9d7e-591638a67f60" path="/var/lib/kubelet/pods/ad2dcb68-2e9f-4104-9d7e-591638a67f60/volumes" Jan 21 18:01:26 crc kubenswrapper[4799]: I0121 18:01:26.217982 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1f2fc84-eafa-4f58-9b63-ae4037e16786" path="/var/lib/kubelet/pods/f1f2fc84-eafa-4f58-9b63-ae4037e16786/volumes" Jan 21 18:01:26 crc kubenswrapper[4799]: I0121 18:01:26.854403 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" exitCode=0 Jan 21 18:01:26 crc kubenswrapper[4799]: I0121 18:01:26.854469 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861"} Jan 21 18:01:26 crc kubenswrapper[4799]: I0121 18:01:26.854770 4799 scope.go:117] "RemoveContainer" containerID="1b8c7c0681d7bc6849b2f8af2f982319bd30fd0c75b4bf458a9fe26a72616dd0" Jan 21 18:01:26 crc kubenswrapper[4799]: I0121 18:01:26.855435 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" Jan 21 18:01:26 crc kubenswrapper[4799]: E0121 18:01:26.855746 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" 
podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:01:37 crc kubenswrapper[4799]: I0121 18:01:37.240895 4799 scope.go:117] "RemoveContainer" containerID="1f868879eb3407085cc889ceabaddc4c5f09612e1757a2452abb1893e0fad878" Jan 21 18:01:37 crc kubenswrapper[4799]: I0121 18:01:37.265207 4799 scope.go:117] "RemoveContainer" containerID="afa598ced7a45ee637cd2a3f9a30e04109a5c2481da7c44df6e5f52ef667d3c6" Jan 21 18:01:37 crc kubenswrapper[4799]: I0121 18:01:37.298975 4799 scope.go:117] "RemoveContainer" containerID="8257368547dac2a98a11908961d8e956daf1070dcfa3a74cb1275428ab2cc9fb" Jan 21 18:01:37 crc kubenswrapper[4799]: I0121 18:01:37.337984 4799 scope.go:117] "RemoveContainer" containerID="56cc341f636759eb8be73d924f3983d29ea8ee7c208aca3c96869ee1168415a0" Jan 21 18:01:37 crc kubenswrapper[4799]: I0121 18:01:37.362785 4799 scope.go:117] "RemoveContainer" containerID="8e4869fe1b4ed83a664528fbe7bb4a92df601aa469f4d511220502d45c923c78" Jan 21 18:01:37 crc kubenswrapper[4799]: I0121 18:01:37.424941 4799 scope.go:117] "RemoveContainer" containerID="e268b7b6e4009e5ade7686cf1fb40157fab2888cd1c7a6aca298b45d83edfbf5" Jan 21 18:01:37 crc kubenswrapper[4799]: I0121 18:01:37.474398 4799 scope.go:117] "RemoveContainer" containerID="037da1ec757c6c3a5621741c7ccc8d58a7656c5e926475137dd4a5108b99a1a0" Jan 21 18:01:37 crc kubenswrapper[4799]: I0121 18:01:37.520933 4799 scope.go:117] "RemoveContainer" containerID="4ce41ac713a33a886a720efb9e85c805178b172844ea5fa6d59095971cb434da" Jan 21 18:01:37 crc kubenswrapper[4799]: I0121 18:01:37.584958 4799 scope.go:117] "RemoveContainer" containerID="3f6d4019b3f2b9ff8583ebcde5a8afb203320ca0be9babcd6d86c8d667f41cdb" Jan 21 18:01:37 crc kubenswrapper[4799]: I0121 18:01:37.608627 4799 scope.go:117] "RemoveContainer" containerID="54090834d9347604133a6822ab003bcde63d929770baaca2799c25431f01583c" Jan 21 18:01:37 crc kubenswrapper[4799]: I0121 18:01:37.632219 4799 scope.go:117] "RemoveContainer" containerID="995cae26e739d9a456ecb2d6cf1a4bcb754bfddde480558e3d28f917fe5b4814" Jan 21 18:01:37 crc kubenswrapper[4799]: I0121 18:01:37.657902 4799 scope.go:117] "RemoveContainer" containerID="e589326dc82b2741fba075b192f75b10504ab93b8983b3297975219f2985660b" Jan 21 18:01:37 crc kubenswrapper[4799]: I0121 18:01:37.676583 4799 scope.go:117] "RemoveContainer" containerID="6738287bddbaf0a41b8381e824d218575ef877c74badb99845415c8786f2cd7c" Jan 21 18:01:37 crc kubenswrapper[4799]: I0121 18:01:37.697804 4799 scope.go:117] "RemoveContainer" containerID="22135e09dbaa9baaec640bf5761ff28100f927c16c7d1a6d60b00558f74d18fc" Jan 21 18:01:37 crc kubenswrapper[4799]: I0121 18:01:37.722600 4799 scope.go:117] "RemoveContainer" containerID="e1ee834cfcec2ccdec730b3681d1ea54e39b1a1ab5303f8721bfef6f8acce1f7" Jan 21 18:01:39 crc kubenswrapper[4799]: I0121 18:01:39.053596 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-h9cvz"] Jan 21 18:01:39 crc kubenswrapper[4799]: I0121 18:01:39.065325 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-h9cvz"] Jan 21 18:01:40 crc kubenswrapper[4799]: I0121 18:01:40.205801 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" Jan 21 18:01:40 crc kubenswrapper[4799]: E0121 18:01:40.206647 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:01:40 crc kubenswrapper[4799]: I0121 18:01:40.218585 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="900a87e2-8f11-4a39-8b54-59283d6fc6c2" path="/var/lib/kubelet/pods/900a87e2-8f11-4a39-8b54-59283d6fc6c2/volumes" Jan 21 18:01:44 crc kubenswrapper[4799]: I0121 18:01:44.033034 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-c43d-account-create-update-c7d49"] Jan 21 18:01:44 crc kubenswrapper[4799]: I0121 18:01:44.044518 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-c43d-account-create-update-c7d49"] Jan 21 18:01:44 crc kubenswrapper[4799]: I0121 18:01:44.219477 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="237ef136-a48e-462a-b261-c7f2e386a15e" path="/var/lib/kubelet/pods/237ef136-a48e-462a-b261-c7f2e386a15e/volumes" Jan 21 18:01:52 crc kubenswrapper[4799]: I0121 18:01:52.205243 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" Jan 21 18:01:52 crc kubenswrapper[4799]: E0121 18:01:52.206099 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:01:54 crc kubenswrapper[4799]: I0121 18:01:54.043221 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-hzwxh"] Jan 21 18:01:54 crc kubenswrapper[4799]: I0121 18:01:54.054515 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-8290-account-create-update-tqxrl"] Jan 21 18:01:54 crc kubenswrapper[4799]: I0121 18:01:54.068068 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-40ee-account-create-update-76684"] Jan 21 18:01:54 crc kubenswrapper[4799]: I0121 18:01:54.078504 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-hzwxh"] Jan 21 18:01:54 crc kubenswrapper[4799]: I0121 18:01:54.087310 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-t49pc"] Jan 21 18:01:54 crc kubenswrapper[4799]: I0121 18:01:54.095947 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-t49pc"] Jan 21 18:01:54 crc kubenswrapper[4799]: I0121 18:01:54.106507 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-8290-account-create-update-tqxrl"] Jan 21 18:01:54 crc kubenswrapper[4799]: I0121 18:01:54.116651 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-40ee-account-create-update-76684"] Jan 21 18:01:54 crc kubenswrapper[4799]: I0121 18:01:54.221824 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74829fa6-f20b-437c-9a76-e336eeb52598" path="/var/lib/kubelet/pods/74829fa6-f20b-437c-9a76-e336eeb52598/volumes" Jan 21 18:01:54 crc kubenswrapper[4799]: I0121 18:01:54.223040 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85b36cad-2226-43fc-840c-1b44fa673bcb" 
path="/var/lib/kubelet/pods/85b36cad-2226-43fc-840c-1b44fa673bcb/volumes" Jan 21 18:01:54 crc kubenswrapper[4799]: I0121 18:01:54.223748 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95c5771e-7cc6-4529-a3fb-f8568b69a74a" path="/var/lib/kubelet/pods/95c5771e-7cc6-4529-a3fb-f8568b69a74a/volumes" Jan 21 18:01:54 crc kubenswrapper[4799]: I0121 18:01:54.224362 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b91e7f5a-8ad9-4965-8839-dc43dcfbaada" path="/var/lib/kubelet/pods/b91e7f5a-8ad9-4965-8839-dc43dcfbaada/volumes" Jan 21 18:01:59 crc kubenswrapper[4799]: I0121 18:01:59.035269 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-vlrpv"] Jan 21 18:01:59 crc kubenswrapper[4799]: I0121 18:01:59.046840 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-vlrpv"] Jan 21 18:02:00 crc kubenswrapper[4799]: I0121 18:02:00.218392 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="112b4dcd-ad4f-40da-9ec8-27bf53f989a8" path="/var/lib/kubelet/pods/112b4dcd-ad4f-40da-9ec8-27bf53f989a8/volumes" Jan 21 18:02:05 crc kubenswrapper[4799]: I0121 18:02:05.205458 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" Jan 21 18:02:05 crc kubenswrapper[4799]: E0121 18:02:05.206224 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:02:16 crc kubenswrapper[4799]: I0121 18:02:16.206034 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" Jan 21 18:02:16 crc kubenswrapper[4799]: E0121 18:02:16.207206 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:02:27 crc kubenswrapper[4799]: I0121 18:02:27.204982 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" Jan 21 18:02:27 crc kubenswrapper[4799]: E0121 18:02:27.205698 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:02:34 crc kubenswrapper[4799]: I0121 18:02:34.057989 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-kdwcc"] Jan 21 18:02:34 crc kubenswrapper[4799]: I0121 18:02:34.067403 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-kdwcc"] Jan 21 18:02:34 crc kubenswrapper[4799]: I0121 18:02:34.218301 4799 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fba342a8-536f-4c59-bb2c-44984e0a7fe0" path="/var/lib/kubelet/pods/fba342a8-536f-4c59-bb2c-44984e0a7fe0/volumes" Jan 21 18:02:37 crc kubenswrapper[4799]: I0121 18:02:37.964488 4799 scope.go:117] "RemoveContainer" containerID="ef5851f3212dc38d3c345d96297e4ee5f40779ad626080f7c5b340a827b84051" Jan 21 18:02:37 crc kubenswrapper[4799]: I0121 18:02:37.991324 4799 scope.go:117] "RemoveContainer" containerID="1cc27ab2ad3ef045292ec06cc1fa63b7684e9e05a018b5c43b21dafae099ee42" Jan 21 18:02:38 crc kubenswrapper[4799]: I0121 18:02:38.086062 4799 scope.go:117] "RemoveContainer" containerID="6a08c663c40d53231c4a0d4cbaddf88e051773b48c26f19eaae4d2e08d2000f7" Jan 21 18:02:38 crc kubenswrapper[4799]: I0121 18:02:38.110038 4799 scope.go:117] "RemoveContainer" containerID="7277835ea22dd4eccf648b43927e7b41037756a06015668489d45fd127056e93" Jan 21 18:02:38 crc kubenswrapper[4799]: I0121 18:02:38.163624 4799 scope.go:117] "RemoveContainer" containerID="23baeb6ec3fb103afb54fc4934b124f7e4a47926b3f3d9e8d5151da9b31f350e" Jan 21 18:02:38 crc kubenswrapper[4799]: I0121 18:02:38.205215 4799 scope.go:117] "RemoveContainer" containerID="57223c5a296c05233437e22c0ec2240b21d77c771f5ab4a77c16b07e24f675be" Jan 21 18:02:38 crc kubenswrapper[4799]: I0121 18:02:38.260036 4799 scope.go:117] "RemoveContainer" containerID="bf1d4dd3499e3170b73ed69258aebf4787d5264aa5f62904658bf6f327e6bf56" Jan 21 18:02:38 crc kubenswrapper[4799]: I0121 18:02:38.281136 4799 scope.go:117] "RemoveContainer" containerID="96ae220119a2db24f19db09016369e25e284599bf2c06418e794ce843dcdf43e" Jan 21 18:02:39 crc kubenswrapper[4799]: I0121 18:02:39.398652 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" Jan 21 18:02:39 crc kubenswrapper[4799]: E0121 18:02:39.403634 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:02:41 crc kubenswrapper[4799]: I0121 18:02:41.064504 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-h8tvt"] Jan 21 18:02:41 crc kubenswrapper[4799]: I0121 18:02:41.076314 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-h8tvt"] Jan 21 18:02:41 crc kubenswrapper[4799]: I0121 18:02:41.086315 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-q4lsr"] Jan 21 18:02:41 crc kubenswrapper[4799]: I0121 18:02:41.096833 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-q4lsr"] Jan 21 18:02:42 crc kubenswrapper[4799]: I0121 18:02:42.043160 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-sync-6zwqt"] Jan 21 18:02:42 crc kubenswrapper[4799]: I0121 18:02:42.055062 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-sync-6zwqt"] Jan 21 18:02:42 crc kubenswrapper[4799]: I0121 18:02:42.217491 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27ad5c23-b3d3-41a1-a4ae-4821eb3524fd" path="/var/lib/kubelet/pods/27ad5c23-b3d3-41a1-a4ae-4821eb3524fd/volumes" Jan 21 18:02:42 crc kubenswrapper[4799]: I0121 18:02:42.219618 
4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c14e317-6824-489e-9c4a-e7cf337c2439" path="/var/lib/kubelet/pods/4c14e317-6824-489e-9c4a-e7cf337c2439/volumes" Jan 21 18:02:42 crc kubenswrapper[4799]: I0121 18:02:42.220438 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e9e3984-f73c-4a6d-8d5d-107481439374" path="/var/lib/kubelet/pods/9e9e3984-f73c-4a6d-8d5d-107481439374/volumes" Jan 21 18:02:43 crc kubenswrapper[4799]: I0121 18:02:43.045182 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-vkhcg"] Jan 21 18:02:43 crc kubenswrapper[4799]: I0121 18:02:43.060686 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-vkhcg"] Jan 21 18:02:44 crc kubenswrapper[4799]: I0121 18:02:44.234772 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7930da5-46c0-4cc3-a63a-316aff9f5b3a" path="/var/lib/kubelet/pods/d7930da5-46c0-4cc3-a63a-316aff9f5b3a/volumes" Jan 21 18:02:48 crc kubenswrapper[4799]: I0121 18:02:48.208255 4799 generic.go:334] "Generic (PLEG): container finished" podID="0bf0a460-fd95-41ad-b7a3-8f3fb4500db7" containerID="bac3649792243f49950bbcc84ea14a5d2e543efb03fa095b730a4edb155b183f" exitCode=0 Jan 21 18:02:48 crc kubenswrapper[4799]: I0121 18:02:48.219385 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" event={"ID":"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7","Type":"ContainerDied","Data":"bac3649792243f49950bbcc84ea14a5d2e543efb03fa095b730a4edb155b183f"} Jan 21 18:02:49 crc kubenswrapper[4799]: I0121 18:02:49.694073 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" Jan 21 18:02:49 crc kubenswrapper[4799]: I0121 18:02:49.859636 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-ssh-key-openstack-edpm-ipam\") pod \"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7\" (UID: \"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7\") " Jan 21 18:02:49 crc kubenswrapper[4799]: I0121 18:02:49.859824 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cjsm\" (UniqueName: \"kubernetes.io/projected/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-kube-api-access-6cjsm\") pod \"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7\" (UID: \"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7\") " Jan 21 18:02:49 crc kubenswrapper[4799]: I0121 18:02:49.859859 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-inventory\") pod \"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7\" (UID: \"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7\") " Jan 21 18:02:49 crc kubenswrapper[4799]: I0121 18:02:49.866441 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-kube-api-access-6cjsm" (OuterVolumeSpecName: "kube-api-access-6cjsm") pod "0bf0a460-fd95-41ad-b7a3-8f3fb4500db7" (UID: "0bf0a460-fd95-41ad-b7a3-8f3fb4500db7"). InnerVolumeSpecName "kube-api-access-6cjsm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:02:49 crc kubenswrapper[4799]: I0121 18:02:49.889593 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "0bf0a460-fd95-41ad-b7a3-8f3fb4500db7" (UID: "0bf0a460-fd95-41ad-b7a3-8f3fb4500db7"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:02:49 crc kubenswrapper[4799]: I0121 18:02:49.890021 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-inventory" (OuterVolumeSpecName: "inventory") pod "0bf0a460-fd95-41ad-b7a3-8f3fb4500db7" (UID: "0bf0a460-fd95-41ad-b7a3-8f3fb4500db7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:02:49 crc kubenswrapper[4799]: I0121 18:02:49.962682 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cjsm\" (UniqueName: \"kubernetes.io/projected/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-kube-api-access-6cjsm\") on node \"crc\" DevicePath \"\"" Jan 21 18:02:49 crc kubenswrapper[4799]: I0121 18:02:49.962712 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-inventory\") on node \"crc\" DevicePath \"\"" Jan 21 18:02:49 crc kubenswrapper[4799]: I0121 18:02:49.962722 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0bf0a460-fd95-41ad-b7a3-8f3fb4500db7-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.238599 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" event={"ID":"0bf0a460-fd95-41ad-b7a3-8f3fb4500db7","Type":"ContainerDied","Data":"7a18b497e32cf066c758d5e80e8963853852f9c88639fd861ee7c2bbd0162b25"} Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.238772 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a18b497e32cf066c758d5e80e8963853852f9c88639fd861ee7c2bbd0162b25" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.238679 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.332956 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6"] Jan 21 18:02:50 crc kubenswrapper[4799]: E0121 18:02:50.334061 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bf0a460-fd95-41ad-b7a3-8f3fb4500db7" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.334210 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bf0a460-fd95-41ad-b7a3-8f3fb4500db7" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.334638 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bf0a460-fd95-41ad-b7a3-8f3fb4500db7" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.335592 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.339555 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p22hr" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.340455 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.340830 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.341015 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.344857 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6"] Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.473519 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4plm\" (UniqueName: \"kubernetes.io/projected/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-kube-api-access-b4plm\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6\" (UID: \"8cea4a3f-5c0a-4e15-a62d-64798a8f818e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.473578 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6\" (UID: \"8cea4a3f-5c0a-4e15-a62d-64798a8f818e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.473900 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6\" (UID: \"8cea4a3f-5c0a-4e15-a62d-64798a8f818e\") " 
pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.576059 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4plm\" (UniqueName: \"kubernetes.io/projected/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-kube-api-access-b4plm\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6\" (UID: \"8cea4a3f-5c0a-4e15-a62d-64798a8f818e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.576161 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6\" (UID: \"8cea4a3f-5c0a-4e15-a62d-64798a8f818e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.576322 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6\" (UID: \"8cea4a3f-5c0a-4e15-a62d-64798a8f818e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.580868 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6\" (UID: \"8cea4a3f-5c0a-4e15-a62d-64798a8f818e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.586857 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6\" (UID: \"8cea4a3f-5c0a-4e15-a62d-64798a8f818e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.592462 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4plm\" (UniqueName: \"kubernetes.io/projected/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-kube-api-access-b4plm\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6\" (UID: \"8cea4a3f-5c0a-4e15-a62d-64798a8f818e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" Jan 21 18:02:50 crc kubenswrapper[4799]: I0121 18:02:50.656799 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" Jan 21 18:02:51 crc kubenswrapper[4799]: I0121 18:02:51.247352 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6"] Jan 21 18:02:52 crc kubenswrapper[4799]: I0121 18:02:52.259472 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" event={"ID":"8cea4a3f-5c0a-4e15-a62d-64798a8f818e","Type":"ContainerStarted","Data":"ff3b745c545de80a4816c76193546f3329abeac57720fa739d3d5a046a8bdd30"} Jan 21 18:02:52 crc kubenswrapper[4799]: I0121 18:02:52.260071 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" event={"ID":"8cea4a3f-5c0a-4e15-a62d-64798a8f818e","Type":"ContainerStarted","Data":"c60e1d630ed1cf4333a7df2102d81dc6b6b0ab366c4591565cc21a6cb4e69719"} Jan 21 18:02:52 crc kubenswrapper[4799]: I0121 18:02:52.280609 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" podStartSLOduration=1.6334988849999998 podStartE2EDuration="2.2805697s" podCreationTimestamp="2026-01-21 18:02:50 +0000 UTC" firstStartedPulling="2026-01-21 18:02:51.250957973 +0000 UTC m=+1797.877247996" lastFinishedPulling="2026-01-21 18:02:51.898028788 +0000 UTC m=+1798.524318811" observedRunningTime="2026-01-21 18:02:52.277791973 +0000 UTC m=+1798.904082016" watchObservedRunningTime="2026-01-21 18:02:52.2805697 +0000 UTC m=+1798.906859723" Jan 21 18:02:54 crc kubenswrapper[4799]: I0121 18:02:54.213027 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" Jan 21 18:02:54 crc kubenswrapper[4799]: E0121 18:02:54.213604 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:03:00 crc kubenswrapper[4799]: I0121 18:03:00.042196 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-74m2t"] Jan 21 18:03:00 crc kubenswrapper[4799]: I0121 18:03:00.051771 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-s94pl"] Jan 21 18:03:00 crc kubenswrapper[4799]: I0121 18:03:00.067463 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-s94pl"] Jan 21 18:03:00 crc kubenswrapper[4799]: I0121 18:03:00.077248 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-74m2t"] Jan 21 18:03:00 crc kubenswrapper[4799]: I0121 18:03:00.218524 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45457092-3e80-4528-99f1-b1f5f1c2f128" path="/var/lib/kubelet/pods/45457092-3e80-4528-99f1-b1f5f1c2f128/volumes" Jan 21 18:03:00 crc kubenswrapper[4799]: I0121 18:03:00.219442 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="482b08ae-060f-465a-9085-20d742c22a13" path="/var/lib/kubelet/pods/482b08ae-060f-465a-9085-20d742c22a13/volumes" Jan 21 18:03:06 crc kubenswrapper[4799]: I0121 18:03:06.204967 4799 scope.go:117] "RemoveContainer" 
containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" Jan 21 18:03:06 crc kubenswrapper[4799]: E0121 18:03:06.205876 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:03:17 crc kubenswrapper[4799]: I0121 18:03:17.205061 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" Jan 21 18:03:17 crc kubenswrapper[4799]: E0121 18:03:17.205914 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:03:29 crc kubenswrapper[4799]: I0121 18:03:29.205593 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" Jan 21 18:03:29 crc kubenswrapper[4799]: E0121 18:03:29.206829 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:03:38 crc kubenswrapper[4799]: I0121 18:03:38.489523 4799 scope.go:117] "RemoveContainer" containerID="02c152ea9b9ec58ea9d61e88f24f232abc6fd9c319333c447f47c7705ddfd950" Jan 21 18:03:38 crc kubenswrapper[4799]: I0121 18:03:38.569592 4799 scope.go:117] "RemoveContainer" containerID="a82d0b436041595b747a1b7b33caff03aa3ff204c8ecbb0708b450b6a580973c" Jan 21 18:03:38 crc kubenswrapper[4799]: I0121 18:03:38.623425 4799 scope.go:117] "RemoveContainer" containerID="22e51f92a929b9cd5d7e5262c57ad52cae081aac91b8a43a7ac2bf2cd078314c" Jan 21 18:03:38 crc kubenswrapper[4799]: I0121 18:03:38.656865 4799 scope.go:117] "RemoveContainer" containerID="e3da8b23a9adbcf3871c602e691f4eeca53327e4701a0d57d505521c4ed93547" Jan 21 18:03:38 crc kubenswrapper[4799]: I0121 18:03:38.731682 4799 scope.go:117] "RemoveContainer" containerID="45f86041843fab27b22ec70bb3c93e872b5440d034d491e2aab2734943be1c66" Jan 21 18:03:38 crc kubenswrapper[4799]: I0121 18:03:38.791545 4799 scope.go:117] "RemoveContainer" containerID="63f960c35f097c4f3ff07c7a6040984afd5aaa1799cd3cbad02d5e2b724834d1" Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.085245 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-14ef-account-create-update-hnpvc"] Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.098647 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-rdss5"] Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.106618 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-77bgq"] Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.115348 
4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-4c13-account-create-update-82mwh"] Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.122873 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-90ed-account-create-update-fp7tj"] Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.132374 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-qd6mw"] Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.140718 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-14ef-account-create-update-hnpvc"] Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.152867 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-rdss5"] Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.162143 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-77bgq"] Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.170958 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-4c13-account-create-update-82mwh"] Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.180199 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-qd6mw"] Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.193344 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-90ed-account-create-update-fp7tj"] Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.205984 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" Jan 21 18:03:42 crc kubenswrapper[4799]: E0121 18:03:42.206480 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.219339 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b5b0c77-a492-474c-b2b9-c0c2e17868dc" path="/var/lib/kubelet/pods/0b5b0c77-a492-474c-b2b9-c0c2e17868dc/volumes" Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.220039 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5" path="/var/lib/kubelet/pods/1cc6a2db-ad5f-4c16-a349-c6be5fc9aff5/volumes" Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.220666 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31e79d66-2da6-47ba-8adf-23e156fa8aae" path="/var/lib/kubelet/pods/31e79d66-2da6-47ba-8adf-23e156fa8aae/volumes" Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.221442 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91716d4a-ab28-4634-bdad-f9e1ba454cc3" path="/var/lib/kubelet/pods/91716d4a-ab28-4634-bdad-f9e1ba454cc3/volumes" Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.222672 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cab07ceb-5e7d-400e-92bb-aa4c08af2a56" path="/var/lib/kubelet/pods/cab07ceb-5e7d-400e-92bb-aa4c08af2a56/volumes" Jan 21 18:03:42 crc kubenswrapper[4799]: I0121 18:03:42.223290 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="f9ee9412-63e5-4fa6-bddd-c362c4241a16" path="/var/lib/kubelet/pods/f9ee9412-63e5-4fa6-bddd-c362c4241a16/volumes" Jan 21 18:03:57 crc kubenswrapper[4799]: I0121 18:03:57.205696 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" Jan 21 18:03:57 crc kubenswrapper[4799]: E0121 18:03:57.206355 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:04:09 crc kubenswrapper[4799]: I0121 18:04:09.111220 4799 generic.go:334] "Generic (PLEG): container finished" podID="8cea4a3f-5c0a-4e15-a62d-64798a8f818e" containerID="ff3b745c545de80a4816c76193546f3329abeac57720fa739d3d5a046a8bdd30" exitCode=0 Jan 21 18:04:09 crc kubenswrapper[4799]: I0121 18:04:09.111330 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" event={"ID":"8cea4a3f-5c0a-4e15-a62d-64798a8f818e","Type":"ContainerDied","Data":"ff3b745c545de80a4816c76193546f3329abeac57720fa739d3d5a046a8bdd30"} Jan 21 18:04:09 crc kubenswrapper[4799]: I0121 18:04:09.205015 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" Jan 21 18:04:09 crc kubenswrapper[4799]: E0121 18:04:09.205294 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:04:10 crc kubenswrapper[4799]: I0121 18:04:10.556622 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" Jan 21 18:04:10 crc kubenswrapper[4799]: I0121 18:04:10.748858 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4plm\" (UniqueName: \"kubernetes.io/projected/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-kube-api-access-b4plm\") pod \"8cea4a3f-5c0a-4e15-a62d-64798a8f818e\" (UID: \"8cea4a3f-5c0a-4e15-a62d-64798a8f818e\") " Jan 21 18:04:10 crc kubenswrapper[4799]: I0121 18:04:10.749089 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-inventory\") pod \"8cea4a3f-5c0a-4e15-a62d-64798a8f818e\" (UID: \"8cea4a3f-5c0a-4e15-a62d-64798a8f818e\") " Jan 21 18:04:10 crc kubenswrapper[4799]: I0121 18:04:10.749133 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-ssh-key-openstack-edpm-ipam\") pod \"8cea4a3f-5c0a-4e15-a62d-64798a8f818e\" (UID: \"8cea4a3f-5c0a-4e15-a62d-64798a8f818e\") " Jan 21 18:04:10 crc kubenswrapper[4799]: I0121 18:04:10.755356 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-kube-api-access-b4plm" (OuterVolumeSpecName: "kube-api-access-b4plm") pod "8cea4a3f-5c0a-4e15-a62d-64798a8f818e" (UID: "8cea4a3f-5c0a-4e15-a62d-64798a8f818e"). InnerVolumeSpecName "kube-api-access-b4plm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:04:10 crc kubenswrapper[4799]: I0121 18:04:10.790212 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "8cea4a3f-5c0a-4e15-a62d-64798a8f818e" (UID: "8cea4a3f-5c0a-4e15-a62d-64798a8f818e"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:04:10 crc kubenswrapper[4799]: I0121 18:04:10.790861 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-inventory" (OuterVolumeSpecName: "inventory") pod "8cea4a3f-5c0a-4e15-a62d-64798a8f818e" (UID: "8cea4a3f-5c0a-4e15-a62d-64798a8f818e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:04:10 crc kubenswrapper[4799]: I0121 18:04:10.852665 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:10 crc kubenswrapper[4799]: I0121 18:04:10.852707 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4plm\" (UniqueName: \"kubernetes.io/projected/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-kube-api-access-b4plm\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:10 crc kubenswrapper[4799]: I0121 18:04:10.852719 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cea4a3f-5c0a-4e15-a62d-64798a8f818e-inventory\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.168639 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" event={"ID":"8cea4a3f-5c0a-4e15-a62d-64798a8f818e","Type":"ContainerDied","Data":"c60e1d630ed1cf4333a7df2102d81dc6b6b0ab366c4591565cc21a6cb4e69719"} Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.169337 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c60e1d630ed1cf4333a7df2102d81dc6b6b0ab366c4591565cc21a6cb4e69719" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.169451 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.254855 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2"] Jan 21 18:04:11 crc kubenswrapper[4799]: E0121 18:04:11.255717 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cea4a3f-5c0a-4e15-a62d-64798a8f818e" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.255756 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cea4a3f-5c0a-4e15-a62d-64798a8f818e" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.256257 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cea4a3f-5c0a-4e15-a62d-64798a8f818e" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.257507 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.260156 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.260355 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p22hr" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.260379 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.266361 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2"] Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.268301 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.363444 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-cxws2\" (UID: \"b2bf4e9d-98ab-403e-8275-ac50c1b2c108\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.363884 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-cxws2\" (UID: \"b2bf4e9d-98ab-403e-8275-ac50c1b2c108\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.364015 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmxc9\" (UniqueName: \"kubernetes.io/projected/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-kube-api-access-wmxc9\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-cxws2\" (UID: \"b2bf4e9d-98ab-403e-8275-ac50c1b2c108\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.466440 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-cxws2\" (UID: \"b2bf4e9d-98ab-403e-8275-ac50c1b2c108\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.466541 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmxc9\" (UniqueName: \"kubernetes.io/projected/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-kube-api-access-wmxc9\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-cxws2\" (UID: \"b2bf4e9d-98ab-403e-8275-ac50c1b2c108\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.466768 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-cxws2\" (UID: \"b2bf4e9d-98ab-403e-8275-ac50c1b2c108\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.482954 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-cxws2\" (UID: \"b2bf4e9d-98ab-403e-8275-ac50c1b2c108\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.494869 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-cxws2\" (UID: \"b2bf4e9d-98ab-403e-8275-ac50c1b2c108\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.497506 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmxc9\" (UniqueName: \"kubernetes.io/projected/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-kube-api-access-wmxc9\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-cxws2\" (UID: \"b2bf4e9d-98ab-403e-8275-ac50c1b2c108\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" Jan 21 18:04:11 crc kubenswrapper[4799]: I0121 18:04:11.580096 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" Jan 21 18:04:12 crc kubenswrapper[4799]: I0121 18:04:12.164784 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2"] Jan 21 18:04:12 crc kubenswrapper[4799]: I0121 18:04:12.171542 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:04:12 crc kubenswrapper[4799]: I0121 18:04:12.223971 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" event={"ID":"b2bf4e9d-98ab-403e-8275-ac50c1b2c108","Type":"ContainerStarted","Data":"a238bdda51c1892d53d1485a02a67e52630fd849caecf9d10ab8453877de79f9"} Jan 21 18:04:13 crc kubenswrapper[4799]: I0121 18:04:13.216869 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" event={"ID":"b2bf4e9d-98ab-403e-8275-ac50c1b2c108","Type":"ContainerStarted","Data":"6c4cee5e75195f146bc942f14224e5f90c9ddad60b9cbce2a7c91bdf3dc56e07"} Jan 21 18:04:13 crc kubenswrapper[4799]: I0121 18:04:13.237348 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" podStartSLOduration=1.8042774110000002 podStartE2EDuration="2.237303507s" podCreationTimestamp="2026-01-21 18:04:11 +0000 UTC" firstStartedPulling="2026-01-21 18:04:12.171225231 +0000 UTC m=+1878.797515254" lastFinishedPulling="2026-01-21 18:04:12.604251327 +0000 UTC m=+1879.230541350" observedRunningTime="2026-01-21 18:04:13.234689485 +0000 UTC m=+1879.860979518" watchObservedRunningTime="2026-01-21 18:04:13.237303507 +0000 UTC m=+1879.863593530" Jan 21 18:04:18 
crc kubenswrapper[4799]: I0121 18:04:18.062354 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-w4p6b"] Jan 21 18:04:18 crc kubenswrapper[4799]: I0121 18:04:18.072991 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-w4p6b"] Jan 21 18:04:18 crc kubenswrapper[4799]: I0121 18:04:18.215943 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="310c250a-8e8a-402c-84d6-5fb50340d73d" path="/var/lib/kubelet/pods/310c250a-8e8a-402c-84d6-5fb50340d73d/volumes" Jan 21 18:04:20 crc kubenswrapper[4799]: I0121 18:04:20.282790 4799 generic.go:334] "Generic (PLEG): container finished" podID="b2bf4e9d-98ab-403e-8275-ac50c1b2c108" containerID="6c4cee5e75195f146bc942f14224e5f90c9ddad60b9cbce2a7c91bdf3dc56e07" exitCode=0 Jan 21 18:04:20 crc kubenswrapper[4799]: I0121 18:04:20.283082 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" event={"ID":"b2bf4e9d-98ab-403e-8275-ac50c1b2c108","Type":"ContainerDied","Data":"6c4cee5e75195f146bc942f14224e5f90c9ddad60b9cbce2a7c91bdf3dc56e07"} Jan 21 18:04:21 crc kubenswrapper[4799]: I0121 18:04:21.758665 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" Jan 21 18:04:21 crc kubenswrapper[4799]: I0121 18:04:21.808255 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmxc9\" (UniqueName: \"kubernetes.io/projected/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-kube-api-access-wmxc9\") pod \"b2bf4e9d-98ab-403e-8275-ac50c1b2c108\" (UID: \"b2bf4e9d-98ab-403e-8275-ac50c1b2c108\") " Jan 21 18:04:21 crc kubenswrapper[4799]: I0121 18:04:21.808620 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-ssh-key-openstack-edpm-ipam\") pod \"b2bf4e9d-98ab-403e-8275-ac50c1b2c108\" (UID: \"b2bf4e9d-98ab-403e-8275-ac50c1b2c108\") " Jan 21 18:04:21 crc kubenswrapper[4799]: I0121 18:04:21.808682 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-inventory\") pod \"b2bf4e9d-98ab-403e-8275-ac50c1b2c108\" (UID: \"b2bf4e9d-98ab-403e-8275-ac50c1b2c108\") " Jan 21 18:04:21 crc kubenswrapper[4799]: I0121 18:04:21.817362 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-kube-api-access-wmxc9" (OuterVolumeSpecName: "kube-api-access-wmxc9") pod "b2bf4e9d-98ab-403e-8275-ac50c1b2c108" (UID: "b2bf4e9d-98ab-403e-8275-ac50c1b2c108"). InnerVolumeSpecName "kube-api-access-wmxc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:04:21 crc kubenswrapper[4799]: I0121 18:04:21.840446 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "b2bf4e9d-98ab-403e-8275-ac50c1b2c108" (UID: "b2bf4e9d-98ab-403e-8275-ac50c1b2c108"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:04:21 crc kubenswrapper[4799]: I0121 18:04:21.841372 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-inventory" (OuterVolumeSpecName: "inventory") pod "b2bf4e9d-98ab-403e-8275-ac50c1b2c108" (UID: "b2bf4e9d-98ab-403e-8275-ac50c1b2c108"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:04:21 crc kubenswrapper[4799]: I0121 18:04:21.911679 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:21 crc kubenswrapper[4799]: I0121 18:04:21.911746 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-inventory\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:21 crc kubenswrapper[4799]: I0121 18:04:21.911755 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmxc9\" (UniqueName: \"kubernetes.io/projected/b2bf4e9d-98ab-403e-8275-ac50c1b2c108-kube-api-access-wmxc9\") on node \"crc\" DevicePath \"\"" Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.301968 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" event={"ID":"b2bf4e9d-98ab-403e-8275-ac50c1b2c108","Type":"ContainerDied","Data":"a238bdda51c1892d53d1485a02a67e52630fd849caecf9d10ab8453877de79f9"} Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.302016 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a238bdda51c1892d53d1485a02a67e52630fd849caecf9d10ab8453877de79f9" Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.302059 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-cxws2" Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.386042 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml"] Jan 21 18:04:22 crc kubenswrapper[4799]: E0121 18:04:22.386483 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2bf4e9d-98ab-403e-8275-ac50c1b2c108" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.386501 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2bf4e9d-98ab-403e-8275-ac50c1b2c108" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.386697 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2bf4e9d-98ab-403e-8275-ac50c1b2c108" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.387394 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml"
Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.389460 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p22hr"
Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.389610 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.389773 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.393154 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.403083 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml"]
Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.420898 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d9cee91-78fe-4816-a3dc-db90e98bcddd-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qknml\" (UID: \"0d9cee91-78fe-4816-a3dc-db90e98bcddd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml"
Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.420964 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcjv7\" (UniqueName: \"kubernetes.io/projected/0d9cee91-78fe-4816-a3dc-db90e98bcddd-kube-api-access-lcjv7\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qknml\" (UID: \"0d9cee91-78fe-4816-a3dc-db90e98bcddd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml"
Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.421029 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0d9cee91-78fe-4816-a3dc-db90e98bcddd-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qknml\" (UID: \"0d9cee91-78fe-4816-a3dc-db90e98bcddd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml"
Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.522968 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d9cee91-78fe-4816-a3dc-db90e98bcddd-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qknml\" (UID: \"0d9cee91-78fe-4816-a3dc-db90e98bcddd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml"
Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.523038 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcjv7\" (UniqueName: \"kubernetes.io/projected/0d9cee91-78fe-4816-a3dc-db90e98bcddd-kube-api-access-lcjv7\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qknml\" (UID: \"0d9cee91-78fe-4816-a3dc-db90e98bcddd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml"
Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.523092 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0d9cee91-78fe-4816-a3dc-db90e98bcddd-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qknml\" (UID: \"0d9cee91-78fe-4816-a3dc-db90e98bcddd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml"
Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.527608 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d9cee91-78fe-4816-a3dc-db90e98bcddd-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qknml\" (UID: \"0d9cee91-78fe-4816-a3dc-db90e98bcddd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml"
Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.531804 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0d9cee91-78fe-4816-a3dc-db90e98bcddd-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qknml\" (UID: \"0d9cee91-78fe-4816-a3dc-db90e98bcddd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml"
Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.541832 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcjv7\" (UniqueName: \"kubernetes.io/projected/0d9cee91-78fe-4816-a3dc-db90e98bcddd-kube-api-access-lcjv7\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qknml\" (UID: \"0d9cee91-78fe-4816-a3dc-db90e98bcddd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml"
Jan 21 18:04:22 crc kubenswrapper[4799]: I0121 18:04:22.705576 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml"
Jan 21 18:04:23 crc kubenswrapper[4799]: I0121 18:04:23.262012 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml"]
Jan 21 18:04:23 crc kubenswrapper[4799]: I0121 18:04:23.317477 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml" event={"ID":"0d9cee91-78fe-4816-a3dc-db90e98bcddd","Type":"ContainerStarted","Data":"4d523eee1398002132149419623980598c8f41f5cf76762950412b8222de640a"}
Jan 21 18:04:24 crc kubenswrapper[4799]: I0121 18:04:24.217942 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861"
Jan 21 18:04:24 crc kubenswrapper[4799]: E0121 18:04:24.218347 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 18:04:25 crc kubenswrapper[4799]: I0121 18:04:25.482556 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml" event={"ID":"0d9cee91-78fe-4816-a3dc-db90e98bcddd","Type":"ContainerStarted","Data":"39af09f16b5c2cfb032ff06dc547fd266bd9901d57a26d2bded95e3e7db87f91"}
Jan 21 18:04:25 crc kubenswrapper[4799]: I0121 18:04:25.533985 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml" podStartSLOduration=2.608945443 podStartE2EDuration="3.533845799s" podCreationTimestamp="2026-01-21 18:04:22 +0000 UTC" firstStartedPulling="2026-01-21 18:04:23.269154496 +0000 UTC m=+1889.895444519" lastFinishedPulling="2026-01-21 18:04:24.194054852 +0000 UTC m=+1890.820344875" observedRunningTime="2026-01-21 18:04:25.506729636 +0000 UTC m=+1892.133019659" watchObservedRunningTime="2026-01-21 18:04:25.533845799 +0000 UTC m=+1892.160135812"
Jan 21 18:04:38 crc kubenswrapper[4799]: I0121 18:04:38.959846 4799 scope.go:117] "RemoveContainer" containerID="10fbb7268ce91cc9fda07cec442f102c130869426157347b4a3061eb4b7a5461"
Jan 21 18:04:39 crc kubenswrapper[4799]: I0121 18:04:39.023771 4799 scope.go:117] "RemoveContainer" containerID="ec42955d5ea5755cf63b92fe7dfa0ad0e817b52ae5570e901b18cc96850a546d"
Jan 21 18:04:39 crc kubenswrapper[4799]: I0121 18:04:39.060216 4799 scope.go:117] "RemoveContainer" containerID="82d399d63d4a396aa21c315b901b81b5a177ffdc529792cc3bf2dcca8d80591b"
Jan 21 18:04:39 crc kubenswrapper[4799]: I0121 18:04:39.100236 4799 scope.go:117] "RemoveContainer" containerID="2b7ebc821747d7d4e3ea7978e64fcba03894c44c590c4ca7dc476e418d9a3e9c"
Jan 21 18:04:39 crc kubenswrapper[4799]: I0121 18:04:39.156147 4799 scope.go:117] "RemoveContainer" containerID="a1a675b49885bd9a1d3215e2ac9b63e051009f55fd5c4a06eb329fd67e09757e"
Jan 21 18:04:39 crc kubenswrapper[4799]: I0121 18:04:39.203519 4799 scope.go:117] "RemoveContainer" containerID="c7a97f1850d2277f1c980d857c97b7f55bd1bbc0f9b2a72bb0474d587c6da282"
Jan 21 18:04:39 crc kubenswrapper[4799]: I0121 18:04:39.207090 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861"
Jan 21 18:04:39 crc kubenswrapper[4799]: E0121 18:04:39.207504 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 18:04:39 crc kubenswrapper[4799]: I0121 18:04:39.270612 4799 scope.go:117] "RemoveContainer" containerID="29a33c604a25dc8e6f8ec3af1edd8e8c08051bba6a35abc8ab46823e429c8fde"
Jan 21 18:04:44 crc kubenswrapper[4799]: I0121 18:04:44.054265 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-p5h5t"]
Jan 21 18:04:44 crc kubenswrapper[4799]: I0121 18:04:44.062260 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-p5h5t"]
Jan 21 18:04:44 crc kubenswrapper[4799]: I0121 18:04:44.219415 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbc0e1b8-d099-4a3b-b501-b8486d893927" path="/var/lib/kubelet/pods/fbc0e1b8-d099-4a3b-b501-b8486d893927/volumes"
Jan 21 18:04:49 crc kubenswrapper[4799]: I0121 18:04:49.036166 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-2zbcj"]
Jan 21 18:04:49 crc kubenswrapper[4799]: I0121 18:04:49.051615 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-2zbcj"]
Jan 21 18:04:50 crc kubenswrapper[4799]: I0121 18:04:50.226054 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fecfb45f-5926-41d7-b7c8-317a1a077eaf" path="/var/lib/kubelet/pods/fecfb45f-5926-41d7-b7c8-317a1a077eaf/volumes"
Jan 21 18:04:53 crc kubenswrapper[4799]: I0121 18:04:53.205555 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861"
Jan 21 18:04:53 crc kubenswrapper[4799]: E0121 18:04:53.206988 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 18:05:06 crc kubenswrapper[4799]: I0121 18:05:06.912969 4799 generic.go:334] "Generic (PLEG): container finished" podID="0d9cee91-78fe-4816-a3dc-db90e98bcddd" containerID="39af09f16b5c2cfb032ff06dc547fd266bd9901d57a26d2bded95e3e7db87f91" exitCode=0
Jan 21 18:05:06 crc kubenswrapper[4799]: I0121 18:05:06.913031 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml" event={"ID":"0d9cee91-78fe-4816-a3dc-db90e98bcddd","Type":"ContainerDied","Data":"39af09f16b5c2cfb032ff06dc547fd266bd9901d57a26d2bded95e3e7db87f91"}
Jan 21 18:05:07 crc kubenswrapper[4799]: I0121 18:05:07.205663 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861"
Jan 21 18:05:07 crc kubenswrapper[4799]: E0121 18:05:07.206355 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 18:05:08 crc kubenswrapper[4799]: I0121 18:05:08.416958 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml"
Jan 21 18:05:08 crc kubenswrapper[4799]: I0121 18:05:08.501970 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0d9cee91-78fe-4816-a3dc-db90e98bcddd-ssh-key-openstack-edpm-ipam\") pod \"0d9cee91-78fe-4816-a3dc-db90e98bcddd\" (UID: \"0d9cee91-78fe-4816-a3dc-db90e98bcddd\") "
Jan 21 18:05:08 crc kubenswrapper[4799]: I0121 18:05:08.502056 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d9cee91-78fe-4816-a3dc-db90e98bcddd-inventory\") pod \"0d9cee91-78fe-4816-a3dc-db90e98bcddd\" (UID: \"0d9cee91-78fe-4816-a3dc-db90e98bcddd\") "
Jan 21 18:05:08 crc kubenswrapper[4799]: I0121 18:05:08.502150 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcjv7\" (UniqueName: \"kubernetes.io/projected/0d9cee91-78fe-4816-a3dc-db90e98bcddd-kube-api-access-lcjv7\") pod \"0d9cee91-78fe-4816-a3dc-db90e98bcddd\" (UID: \"0d9cee91-78fe-4816-a3dc-db90e98bcddd\") "
Jan 21 18:05:08 crc kubenswrapper[4799]: I0121 18:05:08.508331 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d9cee91-78fe-4816-a3dc-db90e98bcddd-kube-api-access-lcjv7" (OuterVolumeSpecName: "kube-api-access-lcjv7") pod "0d9cee91-78fe-4816-a3dc-db90e98bcddd" (UID: "0d9cee91-78fe-4816-a3dc-db90e98bcddd"). InnerVolumeSpecName "kube-api-access-lcjv7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 18:05:08 crc kubenswrapper[4799]: I0121 18:05:08.535947 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d9cee91-78fe-4816-a3dc-db90e98bcddd-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "0d9cee91-78fe-4816-a3dc-db90e98bcddd" (UID: "0d9cee91-78fe-4816-a3dc-db90e98bcddd"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 18:05:08 crc kubenswrapper[4799]: I0121 18:05:08.543967 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d9cee91-78fe-4816-a3dc-db90e98bcddd-inventory" (OuterVolumeSpecName: "inventory") pod "0d9cee91-78fe-4816-a3dc-db90e98bcddd" (UID: "0d9cee91-78fe-4816-a3dc-db90e98bcddd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 18:05:08 crc kubenswrapper[4799]: I0121 18:05:08.605082 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0d9cee91-78fe-4816-a3dc-db90e98bcddd-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Jan 21 18:05:08 crc kubenswrapper[4799]: I0121 18:05:08.605158 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d9cee91-78fe-4816-a3dc-db90e98bcddd-inventory\") on node \"crc\" DevicePath \"\""
Jan 21 18:05:08 crc kubenswrapper[4799]: I0121 18:05:08.605183 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcjv7\" (UniqueName: \"kubernetes.io/projected/0d9cee91-78fe-4816-a3dc-db90e98bcddd-kube-api-access-lcjv7\") on node \"crc\" DevicePath \"\""
Jan 21 18:05:08 crc kubenswrapper[4799]: I0121 18:05:08.937604 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml" event={"ID":"0d9cee91-78fe-4816-a3dc-db90e98bcddd","Type":"ContainerDied","Data":"4d523eee1398002132149419623980598c8f41f5cf76762950412b8222de640a"}
Jan 21 18:05:08 crc kubenswrapper[4799]: I0121 18:05:08.937654 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d523eee1398002132149419623980598c8f41f5cf76762950412b8222de640a"
Jan 21 18:05:08 crc kubenswrapper[4799]: I0121 18:05:08.937731 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qknml"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.050275 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p"]
Jan 21 18:05:09 crc kubenswrapper[4799]: E0121 18:05:09.050980 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d9cee91-78fe-4816-a3dc-db90e98bcddd" containerName="install-os-edpm-deployment-openstack-edpm-ipam"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.051008 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d9cee91-78fe-4816-a3dc-db90e98bcddd" containerName="install-os-edpm-deployment-openstack-edpm-ipam"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.051290 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d9cee91-78fe-4816-a3dc-db90e98bcddd" containerName="install-os-edpm-deployment-openstack-edpm-ipam"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.052403 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.054681 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p22hr"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.054964 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.056739 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.056816 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.060156 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p"]
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.114961 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9zsr\" (UniqueName: \"kubernetes.io/projected/d427281b-c110-468f-b056-78a91049bcd4-kube-api-access-d9zsr\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p\" (UID: \"d427281b-c110-468f-b056-78a91049bcd4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.115024 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d427281b-c110-468f-b056-78a91049bcd4-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p\" (UID: \"d427281b-c110-468f-b056-78a91049bcd4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.115160 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d427281b-c110-468f-b056-78a91049bcd4-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p\" (UID: \"d427281b-c110-468f-b056-78a91049bcd4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.218315 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9zsr\" (UniqueName: \"kubernetes.io/projected/d427281b-c110-468f-b056-78a91049bcd4-kube-api-access-d9zsr\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p\" (UID: \"d427281b-c110-468f-b056-78a91049bcd4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.218361 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d427281b-c110-468f-b056-78a91049bcd4-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p\" (UID: \"d427281b-c110-468f-b056-78a91049bcd4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.218428 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d427281b-c110-468f-b056-78a91049bcd4-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p\" (UID: \"d427281b-c110-468f-b056-78a91049bcd4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.222219 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d427281b-c110-468f-b056-78a91049bcd4-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p\" (UID: \"d427281b-c110-468f-b056-78a91049bcd4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.223453 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d427281b-c110-468f-b056-78a91049bcd4-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p\" (UID: \"d427281b-c110-468f-b056-78a91049bcd4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.235787 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9zsr\" (UniqueName: \"kubernetes.io/projected/d427281b-c110-468f-b056-78a91049bcd4-kube-api-access-d9zsr\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p\" (UID: \"d427281b-c110-468f-b056-78a91049bcd4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.398100 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p"
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.930896 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p"]
Jan 21 18:05:09 crc kubenswrapper[4799]: W0121 18:05:09.936250 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd427281b_c110_468f_b056_78a91049bcd4.slice/crio-0adc3c6d5c341dce4ff2ab3d062f6f6ed3f2b94b12c0bef8d7efe48dc5fa9051 WatchSource:0}: Error finding container 0adc3c6d5c341dce4ff2ab3d062f6f6ed3f2b94b12c0bef8d7efe48dc5fa9051: Status 404 returned error can't find the container with id 0adc3c6d5c341dce4ff2ab3d062f6f6ed3f2b94b12c0bef8d7efe48dc5fa9051
Jan 21 18:05:09 crc kubenswrapper[4799]: I0121 18:05:09.947475 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p" event={"ID":"d427281b-c110-468f-b056-78a91049bcd4","Type":"ContainerStarted","Data":"0adc3c6d5c341dce4ff2ab3d062f6f6ed3f2b94b12c0bef8d7efe48dc5fa9051"}
Jan 21 18:05:10 crc kubenswrapper[4799]: I0121 18:05:10.960549 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p" event={"ID":"d427281b-c110-468f-b056-78a91049bcd4","Type":"ContainerStarted","Data":"f0bba7270fe44ad44e7793009a4aab57f0dd5ba5dc0b5a2b4fe6d2221ddc1568"}
Jan 21 18:05:10 crc kubenswrapper[4799]: I0121 18:05:10.988101 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p" podStartSLOduration=1.545632029 podStartE2EDuration="1.988076397s" podCreationTimestamp="2026-01-21 18:05:09 +0000 UTC" firstStartedPulling="2026-01-21 18:05:09.938749505 +0000 UTC m=+1936.565039528" lastFinishedPulling="2026-01-21 18:05:10.381193873 +0000 UTC m=+1937.007483896" observedRunningTime="2026-01-21 18:05:10.977950415 +0000 UTC m=+1937.604240448" watchObservedRunningTime="2026-01-21 18:05:10.988076397 +0000 UTC m=+1937.614366420"
Jan 21 18:05:22 crc kubenswrapper[4799]: I0121 18:05:22.205874 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861"
Jan 21 18:05:22 crc kubenswrapper[4799]: E0121 18:05:22.206867 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 18:05:29 crc kubenswrapper[4799]: I0121 18:05:29.060242 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-24qlw"]
Jan 21 18:05:29 crc kubenswrapper[4799]: I0121 18:05:29.074487 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-24qlw"]
Jan 21 18:05:30 crc kubenswrapper[4799]: I0121 18:05:30.221943 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1faad361-9dfd-4168-aaa4-626082473a62" path="/var/lib/kubelet/pods/1faad361-9dfd-4168-aaa4-626082473a62/volumes"
Jan 21 18:05:36 crc kubenswrapper[4799]: I0121 18:05:36.206088 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861"
Jan 21 18:05:36 crc kubenswrapper[4799]: E0121 18:05:36.206960 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 18:05:39 crc kubenswrapper[4799]: I0121 18:05:39.412262 4799 scope.go:117] "RemoveContainer" containerID="8d246599ecd08657078b8e4170b654092c811f5122d25bfbbdfdedfcf16646ef"
Jan 21 18:05:39 crc kubenswrapper[4799]: I0121 18:05:39.472675 4799 scope.go:117] "RemoveContainer" containerID="2aca9b9dd8679a92295ee233b5fb8fcf01609815d98a37a5e078b1def4d1f871"
Jan 21 18:05:39 crc kubenswrapper[4799]: I0121 18:05:39.539383 4799 scope.go:117] "RemoveContainer" containerID="d0e5986d3cb474c2f382f374d9b4eb4d38cb9a20691efe800702ae5913597568"
Jan 21 18:05:49 crc kubenswrapper[4799]: I0121 18:05:49.205922 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861"
Jan 21 18:05:49 crc kubenswrapper[4799]: E0121 18:05:49.206794 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 18:06:00 crc kubenswrapper[4799]: I0121 18:06:00.206347 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861"
Jan 21 18:06:00 crc kubenswrapper[4799]: E0121 18:06:00.207227 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 18:06:08 crc kubenswrapper[4799]: I0121 18:06:08.594688 4799 generic.go:334] "Generic (PLEG): container finished" podID="d427281b-c110-468f-b056-78a91049bcd4" containerID="f0bba7270fe44ad44e7793009a4aab57f0dd5ba5dc0b5a2b4fe6d2221ddc1568" exitCode=0
Jan 21 18:06:08 crc kubenswrapper[4799]: I0121 18:06:08.595165 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p" event={"ID":"d427281b-c110-468f-b056-78a91049bcd4","Type":"ContainerDied","Data":"f0bba7270fe44ad44e7793009a4aab57f0dd5ba5dc0b5a2b4fe6d2221ddc1568"}
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.087430 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p"
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.190668 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d427281b-c110-468f-b056-78a91049bcd4-ssh-key-openstack-edpm-ipam\") pod \"d427281b-c110-468f-b056-78a91049bcd4\" (UID: \"d427281b-c110-468f-b056-78a91049bcd4\") "
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.190761 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d427281b-c110-468f-b056-78a91049bcd4-inventory\") pod \"d427281b-c110-468f-b056-78a91049bcd4\" (UID: \"d427281b-c110-468f-b056-78a91049bcd4\") "
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.190923 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9zsr\" (UniqueName: \"kubernetes.io/projected/d427281b-c110-468f-b056-78a91049bcd4-kube-api-access-d9zsr\") pod \"d427281b-c110-468f-b056-78a91049bcd4\" (UID: \"d427281b-c110-468f-b056-78a91049bcd4\") "
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.195624 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d427281b-c110-468f-b056-78a91049bcd4-kube-api-access-d9zsr" (OuterVolumeSpecName: "kube-api-access-d9zsr") pod "d427281b-c110-468f-b056-78a91049bcd4" (UID: "d427281b-c110-468f-b056-78a91049bcd4"). InnerVolumeSpecName "kube-api-access-d9zsr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.221024 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d427281b-c110-468f-b056-78a91049bcd4-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "d427281b-c110-468f-b056-78a91049bcd4" (UID: "d427281b-c110-468f-b056-78a91049bcd4"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.226956 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d427281b-c110-468f-b056-78a91049bcd4-inventory" (OuterVolumeSpecName: "inventory") pod "d427281b-c110-468f-b056-78a91049bcd4" (UID: "d427281b-c110-468f-b056-78a91049bcd4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.293913 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d427281b-c110-468f-b056-78a91049bcd4-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.293983 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d427281b-c110-468f-b056-78a91049bcd4-inventory\") on node \"crc\" DevicePath \"\""
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.293994 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9zsr\" (UniqueName: \"kubernetes.io/projected/d427281b-c110-468f-b056-78a91049bcd4-kube-api-access-d9zsr\") on node \"crc\" DevicePath \"\""
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.614412 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p" event={"ID":"d427281b-c110-468f-b056-78a91049bcd4","Type":"ContainerDied","Data":"0adc3c6d5c341dce4ff2ab3d062f6f6ed3f2b94b12c0bef8d7efe48dc5fa9051"}
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.614733 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0adc3c6d5c341dce4ff2ab3d062f6f6ed3f2b94b12c0bef8d7efe48dc5fa9051"
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.614501 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p"
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.737514 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-d92xl"]
Jan 21 18:06:10 crc kubenswrapper[4799]: E0121 18:06:10.738348 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d427281b-c110-468f-b056-78a91049bcd4" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.738484 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d427281b-c110-468f-b056-78a91049bcd4" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.738880 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d427281b-c110-468f-b056-78a91049bcd4" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.739903 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-d92xl"
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.742786 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.743113 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.744698 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.746309 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p22hr"
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.747612 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-d92xl"]
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.906266 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-d92xl\" (UID: \"a94b72a4-75d5-427c-86ab-014f1f9ee0a2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d92xl"
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.906330 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4mh7\" (UniqueName: \"kubernetes.io/projected/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-kube-api-access-h4mh7\") pod \"ssh-known-hosts-edpm-deployment-d92xl\" (UID: \"a94b72a4-75d5-427c-86ab-014f1f9ee0a2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d92xl"
Jan 21 18:06:10 crc kubenswrapper[4799]: I0121 18:06:10.906777 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-d92xl\" (UID: \"a94b72a4-75d5-427c-86ab-014f1f9ee0a2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d92xl"
Jan 21 18:06:11 crc kubenswrapper[4799]: I0121 18:06:11.008700 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-d92xl\" (UID: \"a94b72a4-75d5-427c-86ab-014f1f9ee0a2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d92xl"
Jan 21 18:06:11 crc kubenswrapper[4799]: I0121 18:06:11.008782 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4mh7\" (UniqueName: \"kubernetes.io/projected/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-kube-api-access-h4mh7\") pod \"ssh-known-hosts-edpm-deployment-d92xl\" (UID: \"a94b72a4-75d5-427c-86ab-014f1f9ee0a2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d92xl"
Jan 21 18:06:11 crc kubenswrapper[4799]: I0121 18:06:11.008923 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-d92xl\" (UID: \"a94b72a4-75d5-427c-86ab-014f1f9ee0a2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d92xl"
Jan 21 18:06:11 crc kubenswrapper[4799]: I0121 18:06:11.012563 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-d92xl\" (UID: \"a94b72a4-75d5-427c-86ab-014f1f9ee0a2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d92xl"
Jan 21 18:06:11 crc kubenswrapper[4799]: I0121 18:06:11.015233 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-d92xl\" (UID: \"a94b72a4-75d5-427c-86ab-014f1f9ee0a2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d92xl"
Jan 21 18:06:11 crc kubenswrapper[4799]: I0121 18:06:11.030246 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4mh7\" (UniqueName: \"kubernetes.io/projected/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-kube-api-access-h4mh7\") pod \"ssh-known-hosts-edpm-deployment-d92xl\" (UID: \"a94b72a4-75d5-427c-86ab-014f1f9ee0a2\") " pod="openstack/ssh-known-hosts-edpm-deployment-d92xl"
Jan 21 18:06:11 crc kubenswrapper[4799]: I0121 18:06:11.059627 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-d92xl"
Jan 21 18:06:11 crc kubenswrapper[4799]: I0121 18:06:11.626284 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-d92xl"]
Jan 21 18:06:12 crc kubenswrapper[4799]: I0121 18:06:12.205576 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861"
Jan 21 18:06:12 crc kubenswrapper[4799]: E0121 18:06:12.206097 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 18:06:12 crc kubenswrapper[4799]: I0121 18:06:12.637138 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-d92xl" event={"ID":"a94b72a4-75d5-427c-86ab-014f1f9ee0a2","Type":"ContainerStarted","Data":"6ad11a823a0c1e559af6bdf75a489c684a438802acad188997b5dd45f392ead7"}
Jan 21 18:06:12 crc kubenswrapper[4799]: I0121 18:06:12.637196 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-d92xl" event={"ID":"a94b72a4-75d5-427c-86ab-014f1f9ee0a2","Type":"ContainerStarted","Data":"d33ca5000a1f00086fbed57ffe3ec1b2bf896531ba6798b5a4f99019a0cea7c4"}
Jan 21 18:06:12 crc kubenswrapper[4799]: I0121 18:06:12.667838 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-d92xl" podStartSLOduration=2.205434647 podStartE2EDuration="2.667812757s" podCreationTimestamp="2026-01-21 18:06:10 +0000 UTC" firstStartedPulling="2026-01-21 18:06:11.624605116 +0000 UTC m=+1998.250895139" lastFinishedPulling="2026-01-21 18:06:12.086983226 +0000 UTC m=+1998.713273249" observedRunningTime="2026-01-21 18:06:12.653217541 +0000 UTC m=+1999.279507574" watchObservedRunningTime="2026-01-21 18:06:12.667812757 +0000 UTC m=+1999.294102790"
Jan 21 18:06:20 crc kubenswrapper[4799]: I0121 18:06:20.718968 4799 generic.go:334] "Generic (PLEG): container finished" podID="a94b72a4-75d5-427c-86ab-014f1f9ee0a2" containerID="6ad11a823a0c1e559af6bdf75a489c684a438802acad188997b5dd45f392ead7" exitCode=0
Jan 21 18:06:20 crc kubenswrapper[4799]: I0121 18:06:20.719063 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-d92xl" event={"ID":"a94b72a4-75d5-427c-86ab-014f1f9ee0a2","Type":"ContainerDied","Data":"6ad11a823a0c1e559af6bdf75a489c684a438802acad188997b5dd45f392ead7"}
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.246043 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-d92xl"
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.291195 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-ssh-key-openstack-edpm-ipam\") pod \"a94b72a4-75d5-427c-86ab-014f1f9ee0a2\" (UID: \"a94b72a4-75d5-427c-86ab-014f1f9ee0a2\") "
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.291711 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-inventory-0\") pod \"a94b72a4-75d5-427c-86ab-014f1f9ee0a2\" (UID: \"a94b72a4-75d5-427c-86ab-014f1f9ee0a2\") "
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.292436 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4mh7\" (UniqueName: \"kubernetes.io/projected/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-kube-api-access-h4mh7\") pod \"a94b72a4-75d5-427c-86ab-014f1f9ee0a2\" (UID: \"a94b72a4-75d5-427c-86ab-014f1f9ee0a2\") "
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.297914 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-kube-api-access-h4mh7" (OuterVolumeSpecName: "kube-api-access-h4mh7") pod "a94b72a4-75d5-427c-86ab-014f1f9ee0a2" (UID: "a94b72a4-75d5-427c-86ab-014f1f9ee0a2"). InnerVolumeSpecName "kube-api-access-h4mh7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.324727 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "a94b72a4-75d5-427c-86ab-014f1f9ee0a2" (UID: "a94b72a4-75d5-427c-86ab-014f1f9ee0a2"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.324763 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "a94b72a4-75d5-427c-86ab-014f1f9ee0a2" (UID: "a94b72a4-75d5-427c-86ab-014f1f9ee0a2"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.394893 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.394954 4799 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-inventory-0\") on node \"crc\" DevicePath \"\""
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.394971 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4mh7\" (UniqueName: \"kubernetes.io/projected/a94b72a4-75d5-427c-86ab-014f1f9ee0a2-kube-api-access-h4mh7\") on node \"crc\" DevicePath \"\""
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.741620 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-d92xl" event={"ID":"a94b72a4-75d5-427c-86ab-014f1f9ee0a2","Type":"ContainerDied","Data":"d33ca5000a1f00086fbed57ffe3ec1b2bf896531ba6798b5a4f99019a0cea7c4"}
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.741671 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d33ca5000a1f00086fbed57ffe3ec1b2bf896531ba6798b5a4f99019a0cea7c4"
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.741639 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-d92xl"
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.829616 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r"]
Jan 21 18:06:22 crc kubenswrapper[4799]: E0121 18:06:22.830322 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a94b72a4-75d5-427c-86ab-014f1f9ee0a2" containerName="ssh-known-hosts-edpm-deployment"
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.830352 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a94b72a4-75d5-427c-86ab-014f1f9ee0a2" containerName="ssh-known-hosts-edpm-deployment"
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.830640 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a94b72a4-75d5-427c-86ab-014f1f9ee0a2" containerName="ssh-known-hosts-edpm-deployment"
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.831611 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r"
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.834610 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.834854 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.834862 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p22hr"
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.835014 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.841663 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r"]
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.904336 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gjtg\" (UniqueName: \"kubernetes.io/projected/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-kube-api-access-5gjtg\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2sm5r\" (UID: \"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r"
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.904581 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2sm5r\" (UID: \"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r"
Jan 21 18:06:22 crc kubenswrapper[4799]: I0121 18:06:22.905298 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2sm5r\" (UID: \"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r"
Jan 21 18:06:23 crc kubenswrapper[4799]: I0121 18:06:23.007891 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2sm5r\" (UID: \"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r"
Jan 21 18:06:23 crc kubenswrapper[4799]: I0121 18:06:23.008018 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gjtg\" (UniqueName: \"kubernetes.io/projected/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-kube-api-access-5gjtg\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2sm5r\" (UID: \"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r"
Jan 21 18:06:23 crc kubenswrapper[4799]: I0121 18:06:23.008095 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2sm5r\" (UID: \"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r"
Jan 21 18:06:23 crc kubenswrapper[4799]: I0121 18:06:23.020624 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2sm5r\" (UID: \"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r"
Jan 21 18:06:23 crc kubenswrapper[4799]: I0121 18:06:23.027742 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2sm5r\" (UID: \"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r"
Jan 21 18:06:23 crc kubenswrapper[4799]: I0121 18:06:23.031426 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gjtg\" (UniqueName: \"kubernetes.io/projected/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-kube-api-access-5gjtg\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-2sm5r\" (UID: \"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r"
Jan 21 18:06:23 crc kubenswrapper[4799]: I0121 18:06:23.162183 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r"
Jan 21 18:06:23 crc kubenswrapper[4799]: I0121 18:06:23.736839 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r"]
Jan 21 18:06:23 crc kubenswrapper[4799]: W0121 18:06:23.740120 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ea44035_c1c7_45f2_921a_bf2d91a9a7d8.slice/crio-b475714b0b28c127c574c7e9e160e002ec1658ef37bfc6b024ed2ac5aefe98bc WatchSource:0}: Error finding container b475714b0b28c127c574c7e9e160e002ec1658ef37bfc6b024ed2ac5aefe98bc: Status 404 returned error can't find the container with id b475714b0b28c127c574c7e9e160e002ec1658ef37bfc6b024ed2ac5aefe98bc
Jan 21 18:06:23 crc kubenswrapper[4799]: I0121 18:06:23.754907 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r" event={"ID":"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8","Type":"ContainerStarted","Data":"b475714b0b28c127c574c7e9e160e002ec1658ef37bfc6b024ed2ac5aefe98bc"}
Jan 21 18:06:24 crc kubenswrapper[4799]: I0121 18:06:24.766362 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r" event={"ID":"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8","Type":"ContainerStarted","Data":"8042587a087561e5d9c1c8f013b6e5e8801d38d3aaddee8917378294cdd920a0"}
Jan 21 18:06:24 crc kubenswrapper[4799]: I0121 18:06:24.782273 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r" podStartSLOduration=2.34793655 podStartE2EDuration="2.782244472s" podCreationTimestamp="2026-01-21 18:06:22 +0000 UTC" firstStartedPulling="2026-01-21 18:06:23.743156194 +0000 UTC m=+2010.369446207" lastFinishedPulling="2026-01-21 18:06:24.177464096 +0000 UTC m=+2010.803754129" observedRunningTime="2026-01-21 18:06:24.780164514 +0000 UTC m=+2011.406454577" watchObservedRunningTime="2026-01-21 18:06:24.782244472 +0000 UTC m=+2011.408534525"
Jan 21 18:06:25 crc kubenswrapper[4799]: I0121 18:06:25.205630 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861"
Jan 21 18:06:25 crc kubenswrapper[4799]: E0121 18:06:25.206180 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 18:06:34 crc kubenswrapper[4799]: I0121 18:06:34.881158 4799 generic.go:334] "Generic (PLEG): container finished" podID="1ea44035-c1c7-45f2-921a-bf2d91a9a7d8" containerID="8042587a087561e5d9c1c8f013b6e5e8801d38d3aaddee8917378294cdd920a0" exitCode=0
Jan 21 18:06:34 crc kubenswrapper[4799]: I0121 18:06:34.881219 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r" event={"ID":"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8","Type":"ContainerDied","Data":"8042587a087561e5d9c1c8f013b6e5e8801d38d3aaddee8917378294cdd920a0"}
Jan 21 18:06:36 crc kubenswrapper[4799]: I0121 18:06:36.205284 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861"
Jan 21 18:06:36 crc kubenswrapper[4799]: I0121 18:06:36.499778 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r"
Jan 21 18:06:36 crc kubenswrapper[4799]: I0121 18:06:36.610381 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-inventory\") pod \"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8\" (UID: \"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8\") "
Jan 21 18:06:36 crc kubenswrapper[4799]: I0121 18:06:36.610694 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-ssh-key-openstack-edpm-ipam\") pod \"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8\" (UID: \"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8\") "
Jan 21 18:06:36 crc kubenswrapper[4799]: I0121 18:06:36.610951 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gjtg\" (UniqueName: \"kubernetes.io/projected/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-kube-api-access-5gjtg\") pod \"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8\" (UID: \"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8\") "
Jan 21 18:06:36 crc kubenswrapper[4799]: I0121 18:06:36.616185 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-kube-api-access-5gjtg" (OuterVolumeSpecName: "kube-api-access-5gjtg") pod "1ea44035-c1c7-45f2-921a-bf2d91a9a7d8" (UID: "1ea44035-c1c7-45f2-921a-bf2d91a9a7d8"). InnerVolumeSpecName "kube-api-access-5gjtg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 18:06:36 crc kubenswrapper[4799]: I0121 18:06:36.637555 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "1ea44035-c1c7-45f2-921a-bf2d91a9a7d8" (UID: "1ea44035-c1c7-45f2-921a-bf2d91a9a7d8"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 18:06:36 crc kubenswrapper[4799]: I0121 18:06:36.645526 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-inventory" (OuterVolumeSpecName: "inventory") pod "1ea44035-c1c7-45f2-921a-bf2d91a9a7d8" (UID: "1ea44035-c1c7-45f2-921a-bf2d91a9a7d8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 21 18:06:36 crc kubenswrapper[4799]: I0121 18:06:36.714381 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Jan 21 18:06:36 crc kubenswrapper[4799]: I0121 18:06:36.714420 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gjtg\" (UniqueName: \"kubernetes.io/projected/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-kube-api-access-5gjtg\") on node \"crc\" DevicePath \"\""
Jan 21 18:06:36 crc kubenswrapper[4799]: I0121 18:06:36.714438 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ea44035-c1c7-45f2-921a-bf2d91a9a7d8-inventory\") on node \"crc\" DevicePath \"\""
Jan 21 18:06:36 crc kubenswrapper[4799]: I0121 18:06:36.923899 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"92f5a81a66f0edf29639d96ce3dd3c3aaf830d422f1a0c852d1348a026575cf5"}
Jan 21 18:06:36 crc kubenswrapper[4799]: I0121 18:06:36.927181 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r" event={"ID":"1ea44035-c1c7-45f2-921a-bf2d91a9a7d8","Type":"ContainerDied","Data":"b475714b0b28c127c574c7e9e160e002ec1658ef37bfc6b024ed2ac5aefe98bc"}
Jan 21 18:06:36 crc kubenswrapper[4799]: I0121 18:06:36.927216 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b475714b0b28c127c574c7e9e160e002ec1658ef37bfc6b024ed2ac5aefe98bc"
Jan 21 18:06:36 crc kubenswrapper[4799]: I0121 18:06:36.927272 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-2sm5r"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.035946 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf"]
Jan 21 18:06:37 crc kubenswrapper[4799]: E0121 18:06:37.036596 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ea44035-c1c7-45f2-921a-bf2d91a9a7d8" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.036622 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ea44035-c1c7-45f2-921a-bf2d91a9a7d8" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.036939 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ea44035-c1c7-45f2-921a-bf2d91a9a7d8" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.038010 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.040172 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p22hr"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.040356 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.040899 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.041121 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.047022 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf"]
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.140559 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/947392cf-f31e-4cc3-85b9-3fcf86b289ef-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf\" (UID: \"947392cf-f31e-4cc3-85b9-3fcf86b289ef\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.140812 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmn56\" (UniqueName: \"kubernetes.io/projected/947392cf-f31e-4cc3-85b9-3fcf86b289ef-kube-api-access-kmn56\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf\" (UID: \"947392cf-f31e-4cc3-85b9-3fcf86b289ef\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.141186 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/947392cf-f31e-4cc3-85b9-3fcf86b289ef-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf\" (UID: \"947392cf-f31e-4cc3-85b9-3fcf86b289ef\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.243675 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmn56\" (UniqueName: \"kubernetes.io/projected/947392cf-f31e-4cc3-85b9-3fcf86b289ef-kube-api-access-kmn56\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf\" (UID: \"947392cf-f31e-4cc3-85b9-3fcf86b289ef\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.243825 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/947392cf-f31e-4cc3-85b9-3fcf86b289ef-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf\" (UID: \"947392cf-f31e-4cc3-85b9-3fcf86b289ef\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.243949 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/947392cf-f31e-4cc3-85b9-3fcf86b289ef-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf\" (UID: \"947392cf-f31e-4cc3-85b9-3fcf86b289ef\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.249696 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/947392cf-f31e-4cc3-85b9-3fcf86b289ef-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf\" (UID: \"947392cf-f31e-4cc3-85b9-3fcf86b289ef\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.255805 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/947392cf-f31e-4cc3-85b9-3fcf86b289ef-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf\" (UID: \"947392cf-f31e-4cc3-85b9-3fcf86b289ef\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.262398 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmn56\" (UniqueName: \"kubernetes.io/projected/947392cf-f31e-4cc3-85b9-3fcf86b289ef-kube-api-access-kmn56\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf\" (UID: \"947392cf-f31e-4cc3-85b9-3fcf86b289ef\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.365055 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf"
Jan 21 18:06:37 crc kubenswrapper[4799]: I0121 18:06:37.904944 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf"]
Jan 21 18:06:37 crc kubenswrapper[4799]: W0121 18:06:37.928460 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod947392cf_f31e_4cc3_85b9_3fcf86b289ef.slice/crio-75391ddc4cbf30c01a66422de78ae84830a5202fe5f39240b0c8dbe1b60f8797 WatchSource:0}: Error finding container 75391ddc4cbf30c01a66422de78ae84830a5202fe5f39240b0c8dbe1b60f8797: Status 404 returned error can't find the container with id 75391ddc4cbf30c01a66422de78ae84830a5202fe5f39240b0c8dbe1b60f8797
Jan 21 18:06:38 crc kubenswrapper[4799]: I0121 18:06:38.949495 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf" event={"ID":"947392cf-f31e-4cc3-85b9-3fcf86b289ef","Type":"ContainerStarted","Data":"841402417ebc1b5e895558bf5212e901d471ed0bea897dd52c54cd03c1077bbb"}
Jan 21 18:06:38 crc kubenswrapper[4799]: I0121 18:06:38.949916 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf" event={"ID":"947392cf-f31e-4cc3-85b9-3fcf86b289ef","Type":"ContainerStarted","Data":"75391ddc4cbf30c01a66422de78ae84830a5202fe5f39240b0c8dbe1b60f8797"}
Jan 21 18:06:38 crc kubenswrapper[4799]: I0121 18:06:38.980085 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf" podStartSLOduration=1.568775763 podStartE2EDuration="1.980063795s" podCreationTimestamp="2026-01-21 18:06:37 +0000 UTC" firstStartedPulling="2026-01-21 18:06:37.932629116 +0000 UTC m=+2024.558919129" lastFinishedPulling="2026-01-21 18:06:38.343917118 +0000 UTC m=+2024.970207161" observedRunningTime="2026-01-21 18:06:38.972568226 +0000 UTC m=+2025.598858249" watchObservedRunningTime="2026-01-21 18:06:38.980063795 +0000 UTC m=+2025.606353818"
Jan 21 18:06:49 crc kubenswrapper[4799]: I0121 18:06:49.059701 4799 generic.go:334] "Generic (PLEG): container finished" podID="947392cf-f31e-4cc3-85b9-3fcf86b289ef" containerID="841402417ebc1b5e895558bf5212e901d471ed0bea897dd52c54cd03c1077bbb" exitCode=0
Jan 21 18:06:49 crc kubenswrapper[4799]: I0121 18:06:49.059774 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf" event={"ID":"947392cf-f31e-4cc3-85b9-3fcf86b289ef","Type":"ContainerDied","Data":"841402417ebc1b5e895558bf5212e901d471ed0bea897dd52c54cd03c1077bbb"}
Jan 21 18:06:50 crc kubenswrapper[4799]: I0121 18:06:50.533235 4799 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf" Jan 21 18:06:50 crc kubenswrapper[4799]: I0121 18:06:50.556568 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/947392cf-f31e-4cc3-85b9-3fcf86b289ef-ssh-key-openstack-edpm-ipam\") pod \"947392cf-f31e-4cc3-85b9-3fcf86b289ef\" (UID: \"947392cf-f31e-4cc3-85b9-3fcf86b289ef\") " Jan 21 18:06:50 crc kubenswrapper[4799]: I0121 18:06:50.556993 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kmn56\" (UniqueName: \"kubernetes.io/projected/947392cf-f31e-4cc3-85b9-3fcf86b289ef-kube-api-access-kmn56\") pod \"947392cf-f31e-4cc3-85b9-3fcf86b289ef\" (UID: \"947392cf-f31e-4cc3-85b9-3fcf86b289ef\") " Jan 21 18:06:50 crc kubenswrapper[4799]: I0121 18:06:50.557957 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/947392cf-f31e-4cc3-85b9-3fcf86b289ef-inventory\") pod \"947392cf-f31e-4cc3-85b9-3fcf86b289ef\" (UID: \"947392cf-f31e-4cc3-85b9-3fcf86b289ef\") " Jan 21 18:06:50 crc kubenswrapper[4799]: I0121 18:06:50.566575 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/947392cf-f31e-4cc3-85b9-3fcf86b289ef-kube-api-access-kmn56" (OuterVolumeSpecName: "kube-api-access-kmn56") pod "947392cf-f31e-4cc3-85b9-3fcf86b289ef" (UID: "947392cf-f31e-4cc3-85b9-3fcf86b289ef"). InnerVolumeSpecName "kube-api-access-kmn56". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:06:50 crc kubenswrapper[4799]: I0121 18:06:50.598033 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/947392cf-f31e-4cc3-85b9-3fcf86b289ef-inventory" (OuterVolumeSpecName: "inventory") pod "947392cf-f31e-4cc3-85b9-3fcf86b289ef" (UID: "947392cf-f31e-4cc3-85b9-3fcf86b289ef"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:06:50 crc kubenswrapper[4799]: I0121 18:06:50.614802 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/947392cf-f31e-4cc3-85b9-3fcf86b289ef-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "947392cf-f31e-4cc3-85b9-3fcf86b289ef" (UID: "947392cf-f31e-4cc3-85b9-3fcf86b289ef"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:06:50 crc kubenswrapper[4799]: I0121 18:06:50.660427 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/947392cf-f31e-4cc3-85b9-3fcf86b289ef-inventory\") on node \"crc\" DevicePath \"\"" Jan 21 18:06:50 crc kubenswrapper[4799]: I0121 18:06:50.660478 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/947392cf-f31e-4cc3-85b9-3fcf86b289ef-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 21 18:06:50 crc kubenswrapper[4799]: I0121 18:06:50.660490 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kmn56\" (UniqueName: \"kubernetes.io/projected/947392cf-f31e-4cc3-85b9-3fcf86b289ef-kube-api-access-kmn56\") on node \"crc\" DevicePath \"\"" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.091940 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf" event={"ID":"947392cf-f31e-4cc3-85b9-3fcf86b289ef","Type":"ContainerDied","Data":"75391ddc4cbf30c01a66422de78ae84830a5202fe5f39240b0c8dbe1b60f8797"} Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.092313 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="75391ddc4cbf30c01a66422de78ae84830a5202fe5f39240b0c8dbe1b60f8797" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.092059 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.359021 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5"] Jan 21 18:06:51 crc kubenswrapper[4799]: E0121 18:06:51.360007 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="947392cf-f31e-4cc3-85b9-3fcf86b289ef" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.360042 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="947392cf-f31e-4cc3-85b9-3fcf86b289ef" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.360456 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="947392cf-f31e-4cc3-85b9-3fcf86b289ef" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.361996 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.364491 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.365629 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p22hr" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.365920 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.366209 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.368101 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.368558 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.369469 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.369811 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.375457 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.375567 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.375632 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.375674 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.375741 4799 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.375778 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.375848 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.375949 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.375989 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.376042 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.376083 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.376209 4799 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.376257 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84dsr\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-kube-api-access-84dsr\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.376318 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.379445 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5"] Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.478203 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.478281 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.478314 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.478337 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.478382 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.478408 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.478452 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.478485 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.478511 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.478547 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.478570 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.478618 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-bootstrap-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.478646 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84dsr\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-kube-api-access-84dsr\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.478698 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.484149 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.484432 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.485061 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.486025 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.486361 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.486735 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.487948 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.488478 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.488866 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.494972 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.495241 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.495346 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.495938 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.501757 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84dsr\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-kube-api-access-84dsr\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-phxp5\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:51 crc kubenswrapper[4799]: I0121 18:06:51.738067 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:06:52 crc kubenswrapper[4799]: I0121 18:06:52.282601 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5"] Jan 21 18:06:53 crc kubenswrapper[4799]: I0121 18:06:53.111231 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" event={"ID":"7e239b6d-2469-4d29-b1e1-72b1d7916ada","Type":"ContainerStarted","Data":"c581543e4432d173982576c530cc95026a248a2958d62b4559442ffab4137282"} Jan 21 18:06:53 crc kubenswrapper[4799]: I0121 18:06:53.111294 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" event={"ID":"7e239b6d-2469-4d29-b1e1-72b1d7916ada","Type":"ContainerStarted","Data":"160ec63796ca50f63b6cfdc6777e3bc24c0c8a220338a84b0ff397f9087b29ec"} Jan 21 18:06:53 crc kubenswrapper[4799]: I0121 18:06:53.143511 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" podStartSLOduration=1.695038368 podStartE2EDuration="2.143479852s" podCreationTimestamp="2026-01-21 18:06:51 +0000 UTC" firstStartedPulling="2026-01-21 18:06:52.285644999 +0000 UTC m=+2038.911935022" lastFinishedPulling="2026-01-21 18:06:52.734086473 +0000 UTC m=+2039.360376506" observedRunningTime="2026-01-21 18:06:53.134346599 +0000 UTC m=+2039.760636622" watchObservedRunningTime="2026-01-21 18:06:53.143479852 +0000 UTC m=+2039.769769885" Jan 21 18:07:14 crc kubenswrapper[4799]: I0121 18:07:14.703769 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fqkpj"] Jan 21 18:07:14 crc kubenswrapper[4799]: I0121 18:07:14.707990 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:14 crc kubenswrapper[4799]: I0121 18:07:14.720007 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fqkpj"] Jan 21 18:07:14 crc kubenswrapper[4799]: I0121 18:07:14.750536 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfzxg\" (UniqueName: \"kubernetes.io/projected/ed25639a-8422-419a-9a7c-cbf69d1f9d72-kube-api-access-dfzxg\") pod \"redhat-marketplace-fqkpj\" (UID: \"ed25639a-8422-419a-9a7c-cbf69d1f9d72\") " pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:14 crc kubenswrapper[4799]: I0121 18:07:14.750615 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed25639a-8422-419a-9a7c-cbf69d1f9d72-utilities\") pod \"redhat-marketplace-fqkpj\" (UID: \"ed25639a-8422-419a-9a7c-cbf69d1f9d72\") " pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:14 crc kubenswrapper[4799]: I0121 18:07:14.750650 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed25639a-8422-419a-9a7c-cbf69d1f9d72-catalog-content\") pod \"redhat-marketplace-fqkpj\" (UID: \"ed25639a-8422-419a-9a7c-cbf69d1f9d72\") " pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:14 crc kubenswrapper[4799]: I0121 18:07:14.852731 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfzxg\" (UniqueName: \"kubernetes.io/projected/ed25639a-8422-419a-9a7c-cbf69d1f9d72-kube-api-access-dfzxg\") pod \"redhat-marketplace-fqkpj\" (UID: \"ed25639a-8422-419a-9a7c-cbf69d1f9d72\") " pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:14 crc kubenswrapper[4799]: I0121 18:07:14.853108 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed25639a-8422-419a-9a7c-cbf69d1f9d72-utilities\") pod \"redhat-marketplace-fqkpj\" (UID: \"ed25639a-8422-419a-9a7c-cbf69d1f9d72\") " pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:14 crc kubenswrapper[4799]: I0121 18:07:14.853161 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed25639a-8422-419a-9a7c-cbf69d1f9d72-catalog-content\") pod \"redhat-marketplace-fqkpj\" (UID: \"ed25639a-8422-419a-9a7c-cbf69d1f9d72\") " pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:14 crc kubenswrapper[4799]: I0121 18:07:14.853711 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed25639a-8422-419a-9a7c-cbf69d1f9d72-utilities\") pod \"redhat-marketplace-fqkpj\" (UID: \"ed25639a-8422-419a-9a7c-cbf69d1f9d72\") " pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:14 crc kubenswrapper[4799]: I0121 18:07:14.853880 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed25639a-8422-419a-9a7c-cbf69d1f9d72-catalog-content\") pod \"redhat-marketplace-fqkpj\" (UID: \"ed25639a-8422-419a-9a7c-cbf69d1f9d72\") " pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:14 crc kubenswrapper[4799]: I0121 18:07:14.876300 4799 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-dfzxg\" (UniqueName: \"kubernetes.io/projected/ed25639a-8422-419a-9a7c-cbf69d1f9d72-kube-api-access-dfzxg\") pod \"redhat-marketplace-fqkpj\" (UID: \"ed25639a-8422-419a-9a7c-cbf69d1f9d72\") " pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:15 crc kubenswrapper[4799]: I0121 18:07:15.044063 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:15 crc kubenswrapper[4799]: I0121 18:07:15.595834 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fqkpj"] Jan 21 18:07:16 crc kubenswrapper[4799]: I0121 18:07:16.397394 4799 generic.go:334] "Generic (PLEG): container finished" podID="ed25639a-8422-419a-9a7c-cbf69d1f9d72" containerID="132159cea86d65cea4b5a10233aa520f86b8b0079d7abfc927aa7544c9b843c3" exitCode=0 Jan 21 18:07:16 crc kubenswrapper[4799]: I0121 18:07:16.397786 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fqkpj" event={"ID":"ed25639a-8422-419a-9a7c-cbf69d1f9d72","Type":"ContainerDied","Data":"132159cea86d65cea4b5a10233aa520f86b8b0079d7abfc927aa7544c9b843c3"} Jan 21 18:07:16 crc kubenswrapper[4799]: I0121 18:07:16.397827 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fqkpj" event={"ID":"ed25639a-8422-419a-9a7c-cbf69d1f9d72","Type":"ContainerStarted","Data":"632f85088eecdc2e21f27653bacd107131790ff4d833348f711c0d6107d1cfb2"} Jan 21 18:07:17 crc kubenswrapper[4799]: I0121 18:07:17.414120 4799 generic.go:334] "Generic (PLEG): container finished" podID="ed25639a-8422-419a-9a7c-cbf69d1f9d72" containerID="8ce043b1741419d49f4da4f6a3215e5c86eeb8a249c45eb2894390bf1e618265" exitCode=0 Jan 21 18:07:17 crc kubenswrapper[4799]: I0121 18:07:17.414236 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fqkpj" event={"ID":"ed25639a-8422-419a-9a7c-cbf69d1f9d72","Type":"ContainerDied","Data":"8ce043b1741419d49f4da4f6a3215e5c86eeb8a249c45eb2894390bf1e618265"} Jan 21 18:07:19 crc kubenswrapper[4799]: I0121 18:07:19.439597 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fqkpj" event={"ID":"ed25639a-8422-419a-9a7c-cbf69d1f9d72","Type":"ContainerStarted","Data":"ca1e3f1271159687c804ee4bd2b6dfec8517ea05f9a1917379064ad7ea71dac2"} Jan 21 18:07:19 crc kubenswrapper[4799]: I0121 18:07:19.466624 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fqkpj" podStartSLOduration=3.554362566 podStartE2EDuration="5.466592502s" podCreationTimestamp="2026-01-21 18:07:14 +0000 UTC" firstStartedPulling="2026-01-21 18:07:16.400179573 +0000 UTC m=+2063.026469606" lastFinishedPulling="2026-01-21 18:07:18.312409479 +0000 UTC m=+2064.938699542" observedRunningTime="2026-01-21 18:07:19.462335114 +0000 UTC m=+2066.088625157" watchObservedRunningTime="2026-01-21 18:07:19.466592502 +0000 UTC m=+2066.092882525" Jan 21 18:07:25 crc kubenswrapper[4799]: I0121 18:07:25.045601 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:25 crc kubenswrapper[4799]: I0121 18:07:25.046115 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:25 crc kubenswrapper[4799]: I0121 18:07:25.104372 4799 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:25 crc kubenswrapper[4799]: I0121 18:07:25.610140 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:25 crc kubenswrapper[4799]: I0121 18:07:25.727201 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fqkpj"] Jan 21 18:07:27 crc kubenswrapper[4799]: I0121 18:07:27.548837 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fqkpj" podUID="ed25639a-8422-419a-9a7c-cbf69d1f9d72" containerName="registry-server" containerID="cri-o://ca1e3f1271159687c804ee4bd2b6dfec8517ea05f9a1917379064ad7ea71dac2" gracePeriod=2 Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.043433 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.108079 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed25639a-8422-419a-9a7c-cbf69d1f9d72-utilities\") pod \"ed25639a-8422-419a-9a7c-cbf69d1f9d72\" (UID: \"ed25639a-8422-419a-9a7c-cbf69d1f9d72\") " Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.108166 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfzxg\" (UniqueName: \"kubernetes.io/projected/ed25639a-8422-419a-9a7c-cbf69d1f9d72-kube-api-access-dfzxg\") pod \"ed25639a-8422-419a-9a7c-cbf69d1f9d72\" (UID: \"ed25639a-8422-419a-9a7c-cbf69d1f9d72\") " Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.108538 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed25639a-8422-419a-9a7c-cbf69d1f9d72-catalog-content\") pod \"ed25639a-8422-419a-9a7c-cbf69d1f9d72\" (UID: \"ed25639a-8422-419a-9a7c-cbf69d1f9d72\") " Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.108972 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed25639a-8422-419a-9a7c-cbf69d1f9d72-utilities" (OuterVolumeSpecName: "utilities") pod "ed25639a-8422-419a-9a7c-cbf69d1f9d72" (UID: "ed25639a-8422-419a-9a7c-cbf69d1f9d72"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.127614 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed25639a-8422-419a-9a7c-cbf69d1f9d72-kube-api-access-dfzxg" (OuterVolumeSpecName: "kube-api-access-dfzxg") pod "ed25639a-8422-419a-9a7c-cbf69d1f9d72" (UID: "ed25639a-8422-419a-9a7c-cbf69d1f9d72"). InnerVolumeSpecName "kube-api-access-dfzxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.139211 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed25639a-8422-419a-9a7c-cbf69d1f9d72-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed25639a-8422-419a-9a7c-cbf69d1f9d72" (UID: "ed25639a-8422-419a-9a7c-cbf69d1f9d72"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.210448 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed25639a-8422-419a-9a7c-cbf69d1f9d72-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.210524 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed25639a-8422-419a-9a7c-cbf69d1f9d72-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.210544 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfzxg\" (UniqueName: \"kubernetes.io/projected/ed25639a-8422-419a-9a7c-cbf69d1f9d72-kube-api-access-dfzxg\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.563705 4799 generic.go:334] "Generic (PLEG): container finished" podID="ed25639a-8422-419a-9a7c-cbf69d1f9d72" containerID="ca1e3f1271159687c804ee4bd2b6dfec8517ea05f9a1917379064ad7ea71dac2" exitCode=0 Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.563766 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fqkpj" event={"ID":"ed25639a-8422-419a-9a7c-cbf69d1f9d72","Type":"ContainerDied","Data":"ca1e3f1271159687c804ee4bd2b6dfec8517ea05f9a1917379064ad7ea71dac2"} Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.563780 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fqkpj" Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.563808 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fqkpj" event={"ID":"ed25639a-8422-419a-9a7c-cbf69d1f9d72","Type":"ContainerDied","Data":"632f85088eecdc2e21f27653bacd107131790ff4d833348f711c0d6107d1cfb2"} Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.563830 4799 scope.go:117] "RemoveContainer" containerID="ca1e3f1271159687c804ee4bd2b6dfec8517ea05f9a1917379064ad7ea71dac2" Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.595994 4799 scope.go:117] "RemoveContainer" containerID="8ce043b1741419d49f4da4f6a3215e5c86eeb8a249c45eb2894390bf1e618265" Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.597817 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fqkpj"] Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.610765 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fqkpj"] Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.620213 4799 scope.go:117] "RemoveContainer" containerID="132159cea86d65cea4b5a10233aa520f86b8b0079d7abfc927aa7544c9b843c3" Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.683673 4799 scope.go:117] "RemoveContainer" containerID="ca1e3f1271159687c804ee4bd2b6dfec8517ea05f9a1917379064ad7ea71dac2" Jan 21 18:07:28 crc kubenswrapper[4799]: E0121 18:07:28.684408 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca1e3f1271159687c804ee4bd2b6dfec8517ea05f9a1917379064ad7ea71dac2\": container with ID starting with ca1e3f1271159687c804ee4bd2b6dfec8517ea05f9a1917379064ad7ea71dac2 not found: ID does not exist" containerID="ca1e3f1271159687c804ee4bd2b6dfec8517ea05f9a1917379064ad7ea71dac2" Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.684449 4799 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca1e3f1271159687c804ee4bd2b6dfec8517ea05f9a1917379064ad7ea71dac2"} err="failed to get container status \"ca1e3f1271159687c804ee4bd2b6dfec8517ea05f9a1917379064ad7ea71dac2\": rpc error: code = NotFound desc = could not find container \"ca1e3f1271159687c804ee4bd2b6dfec8517ea05f9a1917379064ad7ea71dac2\": container with ID starting with ca1e3f1271159687c804ee4bd2b6dfec8517ea05f9a1917379064ad7ea71dac2 not found: ID does not exist" Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.684482 4799 scope.go:117] "RemoveContainer" containerID="8ce043b1741419d49f4da4f6a3215e5c86eeb8a249c45eb2894390bf1e618265" Jan 21 18:07:28 crc kubenswrapper[4799]: E0121 18:07:28.684952 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ce043b1741419d49f4da4f6a3215e5c86eeb8a249c45eb2894390bf1e618265\": container with ID starting with 8ce043b1741419d49f4da4f6a3215e5c86eeb8a249c45eb2894390bf1e618265 not found: ID does not exist" containerID="8ce043b1741419d49f4da4f6a3215e5c86eeb8a249c45eb2894390bf1e618265" Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.684986 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ce043b1741419d49f4da4f6a3215e5c86eeb8a249c45eb2894390bf1e618265"} err="failed to get container status \"8ce043b1741419d49f4da4f6a3215e5c86eeb8a249c45eb2894390bf1e618265\": rpc error: code = NotFound desc = could not find container \"8ce043b1741419d49f4da4f6a3215e5c86eeb8a249c45eb2894390bf1e618265\": container with ID starting with 8ce043b1741419d49f4da4f6a3215e5c86eeb8a249c45eb2894390bf1e618265 not found: ID does not exist" Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.685007 4799 scope.go:117] "RemoveContainer" containerID="132159cea86d65cea4b5a10233aa520f86b8b0079d7abfc927aa7544c9b843c3" Jan 21 18:07:28 crc kubenswrapper[4799]: E0121 18:07:28.685475 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"132159cea86d65cea4b5a10233aa520f86b8b0079d7abfc927aa7544c9b843c3\": container with ID starting with 132159cea86d65cea4b5a10233aa520f86b8b0079d7abfc927aa7544c9b843c3 not found: ID does not exist" containerID="132159cea86d65cea4b5a10233aa520f86b8b0079d7abfc927aa7544c9b843c3" Jan 21 18:07:28 crc kubenswrapper[4799]: I0121 18:07:28.685524 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"132159cea86d65cea4b5a10233aa520f86b8b0079d7abfc927aa7544c9b843c3"} err="failed to get container status \"132159cea86d65cea4b5a10233aa520f86b8b0079d7abfc927aa7544c9b843c3\": rpc error: code = NotFound desc = could not find container \"132159cea86d65cea4b5a10233aa520f86b8b0079d7abfc927aa7544c9b843c3\": container with ID starting with 132159cea86d65cea4b5a10233aa520f86b8b0079d7abfc927aa7544c9b843c3 not found: ID does not exist" Jan 21 18:07:30 crc kubenswrapper[4799]: I0121 18:07:30.222830 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed25639a-8422-419a-9a7c-cbf69d1f9d72" path="/var/lib/kubelet/pods/ed25639a-8422-419a-9a7c-cbf69d1f9d72/volumes" Jan 21 18:07:37 crc kubenswrapper[4799]: I0121 18:07:37.655727 4799 generic.go:334] "Generic (PLEG): container finished" podID="7e239b6d-2469-4d29-b1e1-72b1d7916ada" containerID="c581543e4432d173982576c530cc95026a248a2958d62b4559442ffab4137282" exitCode=0 Jan 21 18:07:37 crc kubenswrapper[4799]: I0121 
18:07:37.655816 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" event={"ID":"7e239b6d-2469-4d29-b1e1-72b1d7916ada","Type":"ContainerDied","Data":"c581543e4432d173982576c530cc95026a248a2958d62b4559442ffab4137282"} Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.094441 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.164992 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.173099 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "7e239b6d-2469-4d29-b1e1-72b1d7916ada" (UID: "7e239b6d-2469-4d29-b1e1-72b1d7916ada"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.267986 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.268073 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-libvirt-combined-ca-bundle\") pod \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.268261 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.268335 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-ovn-combined-ca-bundle\") pod \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.268403 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-ovn-default-certs-0\") pod \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.268477 4799 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-repo-setup-combined-ca-bundle\") pod \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.268548 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-nova-combined-ca-bundle\") pod \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.268576 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-inventory\") pod \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.268637 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-ssh-key-openstack-edpm-ipam\") pod \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.268713 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-neutron-metadata-combined-ca-bundle\") pod \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.268749 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-telemetry-combined-ca-bundle\") pod \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.268787 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84dsr\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-kube-api-access-84dsr\") pod \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.268812 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-bootstrap-combined-ca-bundle\") pod \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\" (UID: \"7e239b6d-2469-4d29-b1e1-72b1d7916ada\") " Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.270245 4799 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.273010 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "7e239b6d-2469-4d29-b1e1-72b1d7916ada" (UID: 
"7e239b6d-2469-4d29-b1e1-72b1d7916ada"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.273842 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "7e239b6d-2469-4d29-b1e1-72b1d7916ada" (UID: "7e239b6d-2469-4d29-b1e1-72b1d7916ada"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.276860 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "7e239b6d-2469-4d29-b1e1-72b1d7916ada" (UID: "7e239b6d-2469-4d29-b1e1-72b1d7916ada"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.276916 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "7e239b6d-2469-4d29-b1e1-72b1d7916ada" (UID: "7e239b6d-2469-4d29-b1e1-72b1d7916ada"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.277649 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-kube-api-access-84dsr" (OuterVolumeSpecName: "kube-api-access-84dsr") pod "7e239b6d-2469-4d29-b1e1-72b1d7916ada" (UID: "7e239b6d-2469-4d29-b1e1-72b1d7916ada"). InnerVolumeSpecName "kube-api-access-84dsr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.277889 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "7e239b6d-2469-4d29-b1e1-72b1d7916ada" (UID: "7e239b6d-2469-4d29-b1e1-72b1d7916ada"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.278236 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "7e239b6d-2469-4d29-b1e1-72b1d7916ada" (UID: "7e239b6d-2469-4d29-b1e1-72b1d7916ada"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.280088 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "7e239b6d-2469-4d29-b1e1-72b1d7916ada" (UID: "7e239b6d-2469-4d29-b1e1-72b1d7916ada"). InnerVolumeSpecName "nova-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.281325 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "7e239b6d-2469-4d29-b1e1-72b1d7916ada" (UID: "7e239b6d-2469-4d29-b1e1-72b1d7916ada"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.284362 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "7e239b6d-2469-4d29-b1e1-72b1d7916ada" (UID: "7e239b6d-2469-4d29-b1e1-72b1d7916ada"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.285397 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "7e239b6d-2469-4d29-b1e1-72b1d7916ada" (UID: "7e239b6d-2469-4d29-b1e1-72b1d7916ada"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.301592 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-inventory" (OuterVolumeSpecName: "inventory") pod "7e239b6d-2469-4d29-b1e1-72b1d7916ada" (UID: "7e239b6d-2469-4d29-b1e1-72b1d7916ada"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.312326 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "7e239b6d-2469-4d29-b1e1-72b1d7916ada" (UID: "7e239b6d-2469-4d29-b1e1-72b1d7916ada"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.372517 4799 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.372567 4799 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.372584 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84dsr\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-kube-api-access-84dsr\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.372597 4799 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.372611 4799 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.372627 4799 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.372640 4799 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.372657 4799 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.372670 4799 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/7e239b6d-2469-4d29-b1e1-72b1d7916ada-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.372683 4799 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.372696 4799 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.372714 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-inventory\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.372724 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7e239b6d-2469-4d29-b1e1-72b1d7916ada-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.688210 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" event={"ID":"7e239b6d-2469-4d29-b1e1-72b1d7916ada","Type":"ContainerDied","Data":"160ec63796ca50f63b6cfdc6777e3bc24c0c8a220338a84b0ff397f9087b29ec"} Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.688264 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="160ec63796ca50f63b6cfdc6777e3bc24c0c8a220338a84b0ff397f9087b29ec" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.688363 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-phxp5" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.819865 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt"] Jan 21 18:07:39 crc kubenswrapper[4799]: E0121 18:07:39.820437 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e239b6d-2469-4d29-b1e1-72b1d7916ada" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.820466 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e239b6d-2469-4d29-b1e1-72b1d7916ada" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Jan 21 18:07:39 crc kubenswrapper[4799]: E0121 18:07:39.820492 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed25639a-8422-419a-9a7c-cbf69d1f9d72" containerName="registry-server" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.820500 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed25639a-8422-419a-9a7c-cbf69d1f9d72" containerName="registry-server" Jan 21 18:07:39 crc kubenswrapper[4799]: E0121 18:07:39.820517 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed25639a-8422-419a-9a7c-cbf69d1f9d72" containerName="extract-utilities" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.820525 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed25639a-8422-419a-9a7c-cbf69d1f9d72" containerName="extract-utilities" Jan 21 18:07:39 crc kubenswrapper[4799]: E0121 18:07:39.820541 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed25639a-8422-419a-9a7c-cbf69d1f9d72" containerName="extract-content" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.820550 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed25639a-8422-419a-9a7c-cbf69d1f9d72" containerName="extract-content" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.820854 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e239b6d-2469-4d29-b1e1-72b1d7916ada" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.820889 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed25639a-8422-419a-9a7c-cbf69d1f9d72" containerName="registry-server" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.821731 4799 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.824462 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.824611 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.825107 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.825220 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p22hr" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.827434 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.836426 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt"] Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.883191 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrgkt\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.883243 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvm9z\" (UniqueName: \"kubernetes.io/projected/a1a6a3df-3a95-4614-92f5-25fd585431b5-kube-api-access-bvm9z\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrgkt\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.883272 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrgkt\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.883487 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrgkt\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.883541 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a1a6a3df-3a95-4614-92f5-25fd585431b5-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrgkt\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.985241 4799 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrgkt\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.986305 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a1a6a3df-3a95-4614-92f5-25fd585431b5-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrgkt\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.986570 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvm9z\" (UniqueName: \"kubernetes.io/projected/a1a6a3df-3a95-4614-92f5-25fd585431b5-kube-api-access-bvm9z\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrgkt\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.986637 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrgkt\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.986672 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrgkt\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.988115 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a1a6a3df-3a95-4614-92f5-25fd585431b5-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrgkt\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.990848 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrgkt\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.990882 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrgkt\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:39 crc kubenswrapper[4799]: I0121 18:07:39.991228 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrgkt\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:40 crc kubenswrapper[4799]: I0121 18:07:40.006427 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvm9z\" (UniqueName: \"kubernetes.io/projected/a1a6a3df-3a95-4614-92f5-25fd585431b5-kube-api-access-bvm9z\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-mrgkt\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:40 crc kubenswrapper[4799]: I0121 18:07:40.155115 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:07:40 crc kubenswrapper[4799]: I0121 18:07:40.743348 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt"] Jan 21 18:07:41 crc kubenswrapper[4799]: I0121 18:07:41.707545 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" event={"ID":"a1a6a3df-3a95-4614-92f5-25fd585431b5","Type":"ContainerStarted","Data":"29158cf232c8ed12a9c580f37a7793c61bc2cbe48bab66799cabec3b47cb4198"} Jan 21 18:07:41 crc kubenswrapper[4799]: I0121 18:07:41.707947 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" event={"ID":"a1a6a3df-3a95-4614-92f5-25fd585431b5","Type":"ContainerStarted","Data":"b4673b16597e026f93edaa032c3f7b3eeaabdf053b968ff40aba72249ea1da74"} Jan 21 18:07:41 crc kubenswrapper[4799]: I0121 18:07:41.730662 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" podStartSLOduration=2.292417612 podStartE2EDuration="2.730638524s" podCreationTimestamp="2026-01-21 18:07:39 +0000 UTC" firstStartedPulling="2026-01-21 18:07:40.755021733 +0000 UTC m=+2087.381311756" lastFinishedPulling="2026-01-21 18:07:41.193242625 +0000 UTC m=+2087.819532668" observedRunningTime="2026-01-21 18:07:41.727240469 +0000 UTC m=+2088.353530512" watchObservedRunningTime="2026-01-21 18:07:41.730638524 +0000 UTC m=+2088.356928547" Jan 21 18:07:46 crc kubenswrapper[4799]: I0121 18:07:46.640581 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m27hw"] Jan 21 18:07:46 crc kubenswrapper[4799]: I0121 18:07:46.663100 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:07:46 crc kubenswrapper[4799]: I0121 18:07:46.697003 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m27hw"] Jan 21 18:07:46 crc kubenswrapper[4799]: I0121 18:07:46.805671 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16bc0f39-5c57-423c-8a18-7760dd5fb061-utilities\") pod \"redhat-operators-m27hw\" (UID: \"16bc0f39-5c57-423c-8a18-7760dd5fb061\") " pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:07:46 crc kubenswrapper[4799]: I0121 18:07:46.805773 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-958l4\" (UniqueName: \"kubernetes.io/projected/16bc0f39-5c57-423c-8a18-7760dd5fb061-kube-api-access-958l4\") pod \"redhat-operators-m27hw\" (UID: \"16bc0f39-5c57-423c-8a18-7760dd5fb061\") " pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:07:46 crc kubenswrapper[4799]: I0121 18:07:46.805836 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16bc0f39-5c57-423c-8a18-7760dd5fb061-catalog-content\") pod \"redhat-operators-m27hw\" (UID: \"16bc0f39-5c57-423c-8a18-7760dd5fb061\") " pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:07:46 crc kubenswrapper[4799]: I0121 18:07:46.907308 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16bc0f39-5c57-423c-8a18-7760dd5fb061-utilities\") pod \"redhat-operators-m27hw\" (UID: \"16bc0f39-5c57-423c-8a18-7760dd5fb061\") " pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:07:46 crc kubenswrapper[4799]: I0121 18:07:46.907419 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-958l4\" (UniqueName: \"kubernetes.io/projected/16bc0f39-5c57-423c-8a18-7760dd5fb061-kube-api-access-958l4\") pod \"redhat-operators-m27hw\" (UID: \"16bc0f39-5c57-423c-8a18-7760dd5fb061\") " pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:07:46 crc kubenswrapper[4799]: I0121 18:07:46.907491 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16bc0f39-5c57-423c-8a18-7760dd5fb061-catalog-content\") pod \"redhat-operators-m27hw\" (UID: \"16bc0f39-5c57-423c-8a18-7760dd5fb061\") " pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:07:46 crc kubenswrapper[4799]: I0121 18:07:46.907875 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16bc0f39-5c57-423c-8a18-7760dd5fb061-utilities\") pod \"redhat-operators-m27hw\" (UID: \"16bc0f39-5c57-423c-8a18-7760dd5fb061\") " pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:07:46 crc kubenswrapper[4799]: I0121 18:07:46.907909 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16bc0f39-5c57-423c-8a18-7760dd5fb061-catalog-content\") pod \"redhat-operators-m27hw\" (UID: \"16bc0f39-5c57-423c-8a18-7760dd5fb061\") " pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:07:46 crc kubenswrapper[4799]: I0121 18:07:46.932143 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-958l4\" (UniqueName: \"kubernetes.io/projected/16bc0f39-5c57-423c-8a18-7760dd5fb061-kube-api-access-958l4\") pod \"redhat-operators-m27hw\" (UID: \"16bc0f39-5c57-423c-8a18-7760dd5fb061\") " pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:07:47 crc kubenswrapper[4799]: I0121 18:07:47.007118 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:07:47 crc kubenswrapper[4799]: I0121 18:07:47.538930 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m27hw"] Jan 21 18:07:47 crc kubenswrapper[4799]: I0121 18:07:47.769515 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m27hw" event={"ID":"16bc0f39-5c57-423c-8a18-7760dd5fb061","Type":"ContainerStarted","Data":"456a7f83393fb424f696767162c6c21cd685d7c5f9c4184265f73187b2e2795c"} Jan 21 18:07:47 crc kubenswrapper[4799]: I0121 18:07:47.769577 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m27hw" event={"ID":"16bc0f39-5c57-423c-8a18-7760dd5fb061","Type":"ContainerStarted","Data":"621e0f57c5d794a5b161676b40b3eddeab3c84d9b9089b4a46a570a50f61f6ba"} Jan 21 18:07:48 crc kubenswrapper[4799]: I0121 18:07:48.783654 4799 generic.go:334] "Generic (PLEG): container finished" podID="16bc0f39-5c57-423c-8a18-7760dd5fb061" containerID="456a7f83393fb424f696767162c6c21cd685d7c5f9c4184265f73187b2e2795c" exitCode=0 Jan 21 18:07:48 crc kubenswrapper[4799]: I0121 18:07:48.783782 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m27hw" event={"ID":"16bc0f39-5c57-423c-8a18-7760dd5fb061","Type":"ContainerDied","Data":"456a7f83393fb424f696767162c6c21cd685d7c5f9c4184265f73187b2e2795c"} Jan 21 18:07:50 crc kubenswrapper[4799]: I0121 18:07:50.834977 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m27hw" event={"ID":"16bc0f39-5c57-423c-8a18-7760dd5fb061","Type":"ContainerStarted","Data":"0d023cf61da94de6324c9cb0b6f422a5697855141651b58fdf520aa00564e76b"} Jan 21 18:07:52 crc kubenswrapper[4799]: I0121 18:07:52.861113 4799 generic.go:334] "Generic (PLEG): container finished" podID="16bc0f39-5c57-423c-8a18-7760dd5fb061" containerID="0d023cf61da94de6324c9cb0b6f422a5697855141651b58fdf520aa00564e76b" exitCode=0 Jan 21 18:07:52 crc kubenswrapper[4799]: I0121 18:07:52.861182 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m27hw" event={"ID":"16bc0f39-5c57-423c-8a18-7760dd5fb061","Type":"ContainerDied","Data":"0d023cf61da94de6324c9cb0b6f422a5697855141651b58fdf520aa00564e76b"} Jan 21 18:07:53 crc kubenswrapper[4799]: I0121 18:07:53.873506 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m27hw" event={"ID":"16bc0f39-5c57-423c-8a18-7760dd5fb061","Type":"ContainerStarted","Data":"cf4322bb4d8b690c4ffb72e1820ac26fc4ffc705902c8c00ed4dd57d5ef1f3e0"} Jan 21 18:07:57 crc kubenswrapper[4799]: I0121 18:07:57.007324 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:07:57 crc kubenswrapper[4799]: I0121 18:07:57.007601 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:07:58 crc kubenswrapper[4799]: I0121 18:07:58.050982 4799 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-marketplace/redhat-operators-m27hw" podUID="16bc0f39-5c57-423c-8a18-7760dd5fb061" containerName="registry-server" probeResult="failure" output=< Jan 21 18:07:58 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Jan 21 18:07:58 crc kubenswrapper[4799]: > Jan 21 18:08:07 crc kubenswrapper[4799]: I0121 18:08:07.071565 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:08:07 crc kubenswrapper[4799]: I0121 18:08:07.096802 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m27hw" podStartSLOduration=16.456416718 podStartE2EDuration="21.09678283s" podCreationTimestamp="2026-01-21 18:07:46 +0000 UTC" firstStartedPulling="2026-01-21 18:07:48.786893311 +0000 UTC m=+2095.413183334" lastFinishedPulling="2026-01-21 18:07:53.427259383 +0000 UTC m=+2100.053549446" observedRunningTime="2026-01-21 18:07:53.902219621 +0000 UTC m=+2100.528509654" watchObservedRunningTime="2026-01-21 18:08:07.09678283 +0000 UTC m=+2113.723072843" Jan 21 18:08:07 crc kubenswrapper[4799]: I0121 18:08:07.149422 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:08:07 crc kubenswrapper[4799]: I0121 18:08:07.314768 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m27hw"] Jan 21 18:08:09 crc kubenswrapper[4799]: I0121 18:08:09.037897 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m27hw" podUID="16bc0f39-5c57-423c-8a18-7760dd5fb061" containerName="registry-server" containerID="cri-o://cf4322bb4d8b690c4ffb72e1820ac26fc4ffc705902c8c00ed4dd57d5ef1f3e0" gracePeriod=2 Jan 21 18:08:09 crc kubenswrapper[4799]: I0121 18:08:09.542186 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:08:09 crc kubenswrapper[4799]: I0121 18:08:09.739193 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16bc0f39-5c57-423c-8a18-7760dd5fb061-catalog-content\") pod \"16bc0f39-5c57-423c-8a18-7760dd5fb061\" (UID: \"16bc0f39-5c57-423c-8a18-7760dd5fb061\") " Jan 21 18:08:09 crc kubenswrapper[4799]: I0121 18:08:09.739252 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-958l4\" (UniqueName: \"kubernetes.io/projected/16bc0f39-5c57-423c-8a18-7760dd5fb061-kube-api-access-958l4\") pod \"16bc0f39-5c57-423c-8a18-7760dd5fb061\" (UID: \"16bc0f39-5c57-423c-8a18-7760dd5fb061\") " Jan 21 18:08:09 crc kubenswrapper[4799]: I0121 18:08:09.739371 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16bc0f39-5c57-423c-8a18-7760dd5fb061-utilities\") pod \"16bc0f39-5c57-423c-8a18-7760dd5fb061\" (UID: \"16bc0f39-5c57-423c-8a18-7760dd5fb061\") " Jan 21 18:08:09 crc kubenswrapper[4799]: I0121 18:08:09.740844 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16bc0f39-5c57-423c-8a18-7760dd5fb061-utilities" (OuterVolumeSpecName: "utilities") pod "16bc0f39-5c57-423c-8a18-7760dd5fb061" (UID: "16bc0f39-5c57-423c-8a18-7760dd5fb061"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:08:09 crc kubenswrapper[4799]: I0121 18:08:09.746521 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16bc0f39-5c57-423c-8a18-7760dd5fb061-kube-api-access-958l4" (OuterVolumeSpecName: "kube-api-access-958l4") pod "16bc0f39-5c57-423c-8a18-7760dd5fb061" (UID: "16bc0f39-5c57-423c-8a18-7760dd5fb061"). InnerVolumeSpecName "kube-api-access-958l4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:08:09 crc kubenswrapper[4799]: I0121 18:08:09.842910 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-958l4\" (UniqueName: \"kubernetes.io/projected/16bc0f39-5c57-423c-8a18-7760dd5fb061-kube-api-access-958l4\") on node \"crc\" DevicePath \"\"" Jan 21 18:08:09 crc kubenswrapper[4799]: I0121 18:08:09.842945 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16bc0f39-5c57-423c-8a18-7760dd5fb061-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:08:09 crc kubenswrapper[4799]: I0121 18:08:09.878612 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16bc0f39-5c57-423c-8a18-7760dd5fb061-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "16bc0f39-5c57-423c-8a18-7760dd5fb061" (UID: "16bc0f39-5c57-423c-8a18-7760dd5fb061"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:08:09 crc kubenswrapper[4799]: I0121 18:08:09.945084 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16bc0f39-5c57-423c-8a18-7760dd5fb061-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:08:10 crc kubenswrapper[4799]: I0121 18:08:10.055198 4799 generic.go:334] "Generic (PLEG): container finished" podID="16bc0f39-5c57-423c-8a18-7760dd5fb061" containerID="cf4322bb4d8b690c4ffb72e1820ac26fc4ffc705902c8c00ed4dd57d5ef1f3e0" exitCode=0 Jan 21 18:08:10 crc kubenswrapper[4799]: I0121 18:08:10.055305 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m27hw" Jan 21 18:08:10 crc kubenswrapper[4799]: I0121 18:08:10.055289 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m27hw" event={"ID":"16bc0f39-5c57-423c-8a18-7760dd5fb061","Type":"ContainerDied","Data":"cf4322bb4d8b690c4ffb72e1820ac26fc4ffc705902c8c00ed4dd57d5ef1f3e0"} Jan 21 18:08:10 crc kubenswrapper[4799]: I0121 18:08:10.055634 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m27hw" event={"ID":"16bc0f39-5c57-423c-8a18-7760dd5fb061","Type":"ContainerDied","Data":"621e0f57c5d794a5b161676b40b3eddeab3c84d9b9089b4a46a570a50f61f6ba"} Jan 21 18:08:10 crc kubenswrapper[4799]: I0121 18:08:10.055663 4799 scope.go:117] "RemoveContainer" containerID="cf4322bb4d8b690c4ffb72e1820ac26fc4ffc705902c8c00ed4dd57d5ef1f3e0" Jan 21 18:08:10 crc kubenswrapper[4799]: I0121 18:08:10.095326 4799 scope.go:117] "RemoveContainer" containerID="0d023cf61da94de6324c9cb0b6f422a5697855141651b58fdf520aa00564e76b" Jan 21 18:08:10 crc kubenswrapper[4799]: I0121 18:08:10.098120 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m27hw"] Jan 21 18:08:10 crc kubenswrapper[4799]: I0121 18:08:10.109611 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m27hw"] Jan 21 18:08:10 crc kubenswrapper[4799]: I0121 18:08:10.126768 4799 scope.go:117] "RemoveContainer" containerID="456a7f83393fb424f696767162c6c21cd685d7c5f9c4184265f73187b2e2795c" Jan 21 18:08:10 crc kubenswrapper[4799]: I0121 18:08:10.166861 4799 scope.go:117] "RemoveContainer" containerID="cf4322bb4d8b690c4ffb72e1820ac26fc4ffc705902c8c00ed4dd57d5ef1f3e0" Jan 21 18:08:10 crc kubenswrapper[4799]: E0121 18:08:10.167561 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf4322bb4d8b690c4ffb72e1820ac26fc4ffc705902c8c00ed4dd57d5ef1f3e0\": container with ID starting with cf4322bb4d8b690c4ffb72e1820ac26fc4ffc705902c8c00ed4dd57d5ef1f3e0 not found: ID does not exist" containerID="cf4322bb4d8b690c4ffb72e1820ac26fc4ffc705902c8c00ed4dd57d5ef1f3e0" Jan 21 18:08:10 crc kubenswrapper[4799]: I0121 18:08:10.167597 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf4322bb4d8b690c4ffb72e1820ac26fc4ffc705902c8c00ed4dd57d5ef1f3e0"} err="failed to get container status \"cf4322bb4d8b690c4ffb72e1820ac26fc4ffc705902c8c00ed4dd57d5ef1f3e0\": rpc error: code = NotFound desc = could not find container \"cf4322bb4d8b690c4ffb72e1820ac26fc4ffc705902c8c00ed4dd57d5ef1f3e0\": container with ID starting with cf4322bb4d8b690c4ffb72e1820ac26fc4ffc705902c8c00ed4dd57d5ef1f3e0 not found: ID does not exist" Jan 21 18:08:10 crc kubenswrapper[4799]: I0121 18:08:10.167627 4799 scope.go:117] "RemoveContainer" containerID="0d023cf61da94de6324c9cb0b6f422a5697855141651b58fdf520aa00564e76b" Jan 21 18:08:10 crc kubenswrapper[4799]: E0121 18:08:10.168068 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d023cf61da94de6324c9cb0b6f422a5697855141651b58fdf520aa00564e76b\": container with ID starting with 0d023cf61da94de6324c9cb0b6f422a5697855141651b58fdf520aa00564e76b not found: ID does not exist" containerID="0d023cf61da94de6324c9cb0b6f422a5697855141651b58fdf520aa00564e76b" Jan 21 18:08:10 crc kubenswrapper[4799]: I0121 18:08:10.168098 4799 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d023cf61da94de6324c9cb0b6f422a5697855141651b58fdf520aa00564e76b"} err="failed to get container status \"0d023cf61da94de6324c9cb0b6f422a5697855141651b58fdf520aa00564e76b\": rpc error: code = NotFound desc = could not find container \"0d023cf61da94de6324c9cb0b6f422a5697855141651b58fdf520aa00564e76b\": container with ID starting with 0d023cf61da94de6324c9cb0b6f422a5697855141651b58fdf520aa00564e76b not found: ID does not exist" Jan 21 18:08:10 crc kubenswrapper[4799]: I0121 18:08:10.168115 4799 scope.go:117] "RemoveContainer" containerID="456a7f83393fb424f696767162c6c21cd685d7c5f9c4184265f73187b2e2795c" Jan 21 18:08:10 crc kubenswrapper[4799]: E0121 18:08:10.168459 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"456a7f83393fb424f696767162c6c21cd685d7c5f9c4184265f73187b2e2795c\": container with ID starting with 456a7f83393fb424f696767162c6c21cd685d7c5f9c4184265f73187b2e2795c not found: ID does not exist" containerID="456a7f83393fb424f696767162c6c21cd685d7c5f9c4184265f73187b2e2795c" Jan 21 18:08:10 crc kubenswrapper[4799]: I0121 18:08:10.168484 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"456a7f83393fb424f696767162c6c21cd685d7c5f9c4184265f73187b2e2795c"} err="failed to get container status \"456a7f83393fb424f696767162c6c21cd685d7c5f9c4184265f73187b2e2795c\": rpc error: code = NotFound desc = could not find container \"456a7f83393fb424f696767162c6c21cd685d7c5f9c4184265f73187b2e2795c\": container with ID starting with 456a7f83393fb424f696767162c6c21cd685d7c5f9c4184265f73187b2e2795c not found: ID does not exist" Jan 21 18:08:10 crc kubenswrapper[4799]: I0121 18:08:10.233697 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16bc0f39-5c57-423c-8a18-7760dd5fb061" path="/var/lib/kubelet/pods/16bc0f39-5c57-423c-8a18-7760dd5fb061/volumes" Jan 21 18:08:51 crc kubenswrapper[4799]: I0121 18:08:51.495774 4799 generic.go:334] "Generic (PLEG): container finished" podID="a1a6a3df-3a95-4614-92f5-25fd585431b5" containerID="29158cf232c8ed12a9c580f37a7793c61bc2cbe48bab66799cabec3b47cb4198" exitCode=0 Jan 21 18:08:51 crc kubenswrapper[4799]: I0121 18:08:51.495868 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" event={"ID":"a1a6a3df-3a95-4614-92f5-25fd585431b5","Type":"ContainerDied","Data":"29158cf232c8ed12a9c580f37a7793c61bc2cbe48bab66799cabec3b47cb4198"} Jan 21 18:08:52 crc kubenswrapper[4799]: I0121 18:08:52.957747 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.063440 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-ovn-combined-ca-bundle\") pod \"a1a6a3df-3a95-4614-92f5-25fd585431b5\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.063525 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvm9z\" (UniqueName: \"kubernetes.io/projected/a1a6a3df-3a95-4614-92f5-25fd585431b5-kube-api-access-bvm9z\") pod \"a1a6a3df-3a95-4614-92f5-25fd585431b5\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.063571 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a1a6a3df-3a95-4614-92f5-25fd585431b5-ovncontroller-config-0\") pod \"a1a6a3df-3a95-4614-92f5-25fd585431b5\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.063645 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-inventory\") pod \"a1a6a3df-3a95-4614-92f5-25fd585431b5\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.063708 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-ssh-key-openstack-edpm-ipam\") pod \"a1a6a3df-3a95-4614-92f5-25fd585431b5\" (UID: \"a1a6a3df-3a95-4614-92f5-25fd585431b5\") " Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.072406 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "a1a6a3df-3a95-4614-92f5-25fd585431b5" (UID: "a1a6a3df-3a95-4614-92f5-25fd585431b5"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.072457 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1a6a3df-3a95-4614-92f5-25fd585431b5-kube-api-access-bvm9z" (OuterVolumeSpecName: "kube-api-access-bvm9z") pod "a1a6a3df-3a95-4614-92f5-25fd585431b5" (UID: "a1a6a3df-3a95-4614-92f5-25fd585431b5"). InnerVolumeSpecName "kube-api-access-bvm9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.098890 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "a1a6a3df-3a95-4614-92f5-25fd585431b5" (UID: "a1a6a3df-3a95-4614-92f5-25fd585431b5"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.102351 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1a6a3df-3a95-4614-92f5-25fd585431b5-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "a1a6a3df-3a95-4614-92f5-25fd585431b5" (UID: "a1a6a3df-3a95-4614-92f5-25fd585431b5"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.108586 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-inventory" (OuterVolumeSpecName: "inventory") pod "a1a6a3df-3a95-4614-92f5-25fd585431b5" (UID: "a1a6a3df-3a95-4614-92f5-25fd585431b5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.166086 4799 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a1a6a3df-3a95-4614-92f5-25fd585431b5-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.166121 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-inventory\") on node \"crc\" DevicePath \"\"" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.166142 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.166152 4799 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1a6a3df-3a95-4614-92f5-25fd585431b5-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.166160 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvm9z\" (UniqueName: \"kubernetes.io/projected/a1a6a3df-3a95-4614-92f5-25fd585431b5-kube-api-access-bvm9z\") on node \"crc\" DevicePath \"\"" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.515776 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" event={"ID":"a1a6a3df-3a95-4614-92f5-25fd585431b5","Type":"ContainerDied","Data":"b4673b16597e026f93edaa032c3f7b3eeaabdf053b968ff40aba72249ea1da74"} Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.515828 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4673b16597e026f93edaa032c3f7b3eeaabdf053b968ff40aba72249ea1da74" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.515826 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-mrgkt" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.740903 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s"] Jan 21 18:08:53 crc kubenswrapper[4799]: E0121 18:08:53.741631 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16bc0f39-5c57-423c-8a18-7760dd5fb061" containerName="extract-utilities" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.741714 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="16bc0f39-5c57-423c-8a18-7760dd5fb061" containerName="extract-utilities" Jan 21 18:08:53 crc kubenswrapper[4799]: E0121 18:08:53.741783 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1a6a3df-3a95-4614-92f5-25fd585431b5" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.741848 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1a6a3df-3a95-4614-92f5-25fd585431b5" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Jan 21 18:08:53 crc kubenswrapper[4799]: E0121 18:08:53.741916 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16bc0f39-5c57-423c-8a18-7760dd5fb061" containerName="registry-server" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.741968 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="16bc0f39-5c57-423c-8a18-7760dd5fb061" containerName="registry-server" Jan 21 18:08:53 crc kubenswrapper[4799]: E0121 18:08:53.742041 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16bc0f39-5c57-423c-8a18-7760dd5fb061" containerName="extract-content" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.742093 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="16bc0f39-5c57-423c-8a18-7760dd5fb061" containerName="extract-content" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.742362 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1a6a3df-3a95-4614-92f5-25fd585431b5" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.742449 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="16bc0f39-5c57-423c-8a18-7760dd5fb061" containerName="registry-server" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.743283 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.745776 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.745927 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.745963 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.746087 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.746201 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.746237 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p22hr" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.755020 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s"] Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.879788 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.879969 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.880056 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkpr8\" (UniqueName: \"kubernetes.io/projected/f5e75302-b14f-4281-93e3-a40bff4bcafa-kube-api-access-zkpr8\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.880468 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.880632 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.880730 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.982878 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.983333 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.983402 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkpr8\" (UniqueName: \"kubernetes.io/projected/f5e75302-b14f-4281-93e3-a40bff4bcafa-kube-api-access-zkpr8\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.983533 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.983584 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.983628 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: 
\"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.987599 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.988738 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.990727 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.992810 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:53 crc kubenswrapper[4799]: I0121 18:08:53.995716 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:54 crc kubenswrapper[4799]: I0121 18:08:54.007693 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkpr8\" (UniqueName: \"kubernetes.io/projected/f5e75302-b14f-4281-93e3-a40bff4bcafa-kube-api-access-zkpr8\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:54 crc kubenswrapper[4799]: I0121 18:08:54.061459 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:08:54 crc kubenswrapper[4799]: I0121 18:08:54.681280 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s"] Jan 21 18:08:55 crc kubenswrapper[4799]: I0121 18:08:55.176916 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 21 18:08:55 crc kubenswrapper[4799]: I0121 18:08:55.584893 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" event={"ID":"f5e75302-b14f-4281-93e3-a40bff4bcafa","Type":"ContainerStarted","Data":"5cdd18592f3dadc161fc1f20dee65d04fb58f764b74922b4f59cc45c477c56ea"} Jan 21 18:08:55 crc kubenswrapper[4799]: I0121 18:08:55.585770 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" event={"ID":"f5e75302-b14f-4281-93e3-a40bff4bcafa","Type":"ContainerStarted","Data":"67be31e4da2355af428b4b0e4b9495d53773beb6125cabdb382f87b307f5a0c0"} Jan 21 18:08:55 crc kubenswrapper[4799]: I0121 18:08:55.609218 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" podStartSLOduration=2.130302147 podStartE2EDuration="2.609196284s" podCreationTimestamp="2026-01-21 18:08:53 +0000 UTC" firstStartedPulling="2026-01-21 18:08:54.695433387 +0000 UTC m=+2161.321723410" lastFinishedPulling="2026-01-21 18:08:55.174327524 +0000 UTC m=+2161.800617547" observedRunningTime="2026-01-21 18:08:55.60009399 +0000 UTC m=+2162.226384033" watchObservedRunningTime="2026-01-21 18:08:55.609196284 +0000 UTC m=+2162.235486307" Jan 21 18:08:55 crc kubenswrapper[4799]: I0121 18:08:55.971502 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:08:55 crc kubenswrapper[4799]: I0121 18:08:55.971583 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:09:04 crc kubenswrapper[4799]: I0121 18:09:04.407647 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xf7hw"] Jan 21 18:09:04 crc kubenswrapper[4799]: I0121 18:09:04.411984 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:04 crc kubenswrapper[4799]: I0121 18:09:04.421005 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nw9rr\" (UniqueName: \"kubernetes.io/projected/32d8d872-9540-452f-9d00-5b03eba978ea-kube-api-access-nw9rr\") pod \"certified-operators-xf7hw\" (UID: \"32d8d872-9540-452f-9d00-5b03eba978ea\") " pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:04 crc kubenswrapper[4799]: I0121 18:09:04.421094 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32d8d872-9540-452f-9d00-5b03eba978ea-catalog-content\") pod \"certified-operators-xf7hw\" (UID: \"32d8d872-9540-452f-9d00-5b03eba978ea\") " pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:04 crc kubenswrapper[4799]: I0121 18:09:04.421189 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32d8d872-9540-452f-9d00-5b03eba978ea-utilities\") pod \"certified-operators-xf7hw\" (UID: \"32d8d872-9540-452f-9d00-5b03eba978ea\") " pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:04 crc kubenswrapper[4799]: I0121 18:09:04.424465 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xf7hw"] Jan 21 18:09:04 crc kubenswrapper[4799]: I0121 18:09:04.523355 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nw9rr\" (UniqueName: \"kubernetes.io/projected/32d8d872-9540-452f-9d00-5b03eba978ea-kube-api-access-nw9rr\") pod \"certified-operators-xf7hw\" (UID: \"32d8d872-9540-452f-9d00-5b03eba978ea\") " pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:04 crc kubenswrapper[4799]: I0121 18:09:04.523460 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32d8d872-9540-452f-9d00-5b03eba978ea-catalog-content\") pod \"certified-operators-xf7hw\" (UID: \"32d8d872-9540-452f-9d00-5b03eba978ea\") " pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:04 crc kubenswrapper[4799]: I0121 18:09:04.523561 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32d8d872-9540-452f-9d00-5b03eba978ea-utilities\") pod \"certified-operators-xf7hw\" (UID: \"32d8d872-9540-452f-9d00-5b03eba978ea\") " pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:04 crc kubenswrapper[4799]: I0121 18:09:04.524028 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32d8d872-9540-452f-9d00-5b03eba978ea-catalog-content\") pod \"certified-operators-xf7hw\" (UID: \"32d8d872-9540-452f-9d00-5b03eba978ea\") " pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:04 crc kubenswrapper[4799]: I0121 18:09:04.524200 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32d8d872-9540-452f-9d00-5b03eba978ea-utilities\") pod \"certified-operators-xf7hw\" (UID: \"32d8d872-9540-452f-9d00-5b03eba978ea\") " pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:04 crc kubenswrapper[4799]: I0121 18:09:04.553795 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-nw9rr\" (UniqueName: \"kubernetes.io/projected/32d8d872-9540-452f-9d00-5b03eba978ea-kube-api-access-nw9rr\") pod \"certified-operators-xf7hw\" (UID: \"32d8d872-9540-452f-9d00-5b03eba978ea\") " pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:04 crc kubenswrapper[4799]: I0121 18:09:04.756094 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:05 crc kubenswrapper[4799]: I0121 18:09:05.342242 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xf7hw"] Jan 21 18:09:05 crc kubenswrapper[4799]: W0121 18:09:05.343247 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod32d8d872_9540_452f_9d00_5b03eba978ea.slice/crio-0123ca24feea18d145af5a3365b5570bafc1a07e90f7020071e23cb4b4de1246 WatchSource:0}: Error finding container 0123ca24feea18d145af5a3365b5570bafc1a07e90f7020071e23cb4b4de1246: Status 404 returned error can't find the container with id 0123ca24feea18d145af5a3365b5570bafc1a07e90f7020071e23cb4b4de1246 Jan 21 18:09:05 crc kubenswrapper[4799]: I0121 18:09:05.711782 4799 generic.go:334] "Generic (PLEG): container finished" podID="32d8d872-9540-452f-9d00-5b03eba978ea" containerID="6812085b9f6dff86731662eed4e94e56b6999a60f7dbb159f850a3d1c3cae220" exitCode=0 Jan 21 18:09:05 crc kubenswrapper[4799]: I0121 18:09:05.711872 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xf7hw" event={"ID":"32d8d872-9540-452f-9d00-5b03eba978ea","Type":"ContainerDied","Data":"6812085b9f6dff86731662eed4e94e56b6999a60f7dbb159f850a3d1c3cae220"} Jan 21 18:09:05 crc kubenswrapper[4799]: I0121 18:09:05.712048 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xf7hw" event={"ID":"32d8d872-9540-452f-9d00-5b03eba978ea","Type":"ContainerStarted","Data":"0123ca24feea18d145af5a3365b5570bafc1a07e90f7020071e23cb4b4de1246"} Jan 21 18:09:06 crc kubenswrapper[4799]: I0121 18:09:06.722562 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xf7hw" event={"ID":"32d8d872-9540-452f-9d00-5b03eba978ea","Type":"ContainerStarted","Data":"8eb0c7ae1eee8acc30c1eb10b1d1d95fb9320ff13fb32588d7b5210a6a0680c1"} Jan 21 18:09:07 crc kubenswrapper[4799]: I0121 18:09:07.736995 4799 generic.go:334] "Generic (PLEG): container finished" podID="32d8d872-9540-452f-9d00-5b03eba978ea" containerID="8eb0c7ae1eee8acc30c1eb10b1d1d95fb9320ff13fb32588d7b5210a6a0680c1" exitCode=0 Jan 21 18:09:07 crc kubenswrapper[4799]: I0121 18:09:07.737154 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xf7hw" event={"ID":"32d8d872-9540-452f-9d00-5b03eba978ea","Type":"ContainerDied","Data":"8eb0c7ae1eee8acc30c1eb10b1d1d95fb9320ff13fb32588d7b5210a6a0680c1"} Jan 21 18:09:08 crc kubenswrapper[4799]: I0121 18:09:08.750509 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xf7hw" event={"ID":"32d8d872-9540-452f-9d00-5b03eba978ea","Type":"ContainerStarted","Data":"88afd7e190bdec951e29307899fda64fb3ad57da1434bb2765b634e9f6285cba"} Jan 21 18:09:08 crc kubenswrapper[4799]: I0121 18:09:08.783420 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xf7hw" 
podStartSLOduration=2.371192984 podStartE2EDuration="4.783403686s" podCreationTimestamp="2026-01-21 18:09:04 +0000 UTC" firstStartedPulling="2026-01-21 18:09:05.713827508 +0000 UTC m=+2172.340117551" lastFinishedPulling="2026-01-21 18:09:08.12603823 +0000 UTC m=+2174.752328253" observedRunningTime="2026-01-21 18:09:08.776566455 +0000 UTC m=+2175.402856488" watchObservedRunningTime="2026-01-21 18:09:08.783403686 +0000 UTC m=+2175.409693709" Jan 21 18:09:14 crc kubenswrapper[4799]: I0121 18:09:14.756840 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:14 crc kubenswrapper[4799]: I0121 18:09:14.757417 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:14 crc kubenswrapper[4799]: I0121 18:09:14.824136 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:14 crc kubenswrapper[4799]: I0121 18:09:14.884062 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:15 crc kubenswrapper[4799]: I0121 18:09:15.066168 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xf7hw"] Jan 21 18:09:16 crc kubenswrapper[4799]: I0121 18:09:16.830833 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xf7hw" podUID="32d8d872-9540-452f-9d00-5b03eba978ea" containerName="registry-server" containerID="cri-o://88afd7e190bdec951e29307899fda64fb3ad57da1434bb2765b634e9f6285cba" gracePeriod=2 Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.311587 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.403844 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nw9rr\" (UniqueName: \"kubernetes.io/projected/32d8d872-9540-452f-9d00-5b03eba978ea-kube-api-access-nw9rr\") pod \"32d8d872-9540-452f-9d00-5b03eba978ea\" (UID: \"32d8d872-9540-452f-9d00-5b03eba978ea\") " Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.404361 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32d8d872-9540-452f-9d00-5b03eba978ea-catalog-content\") pod \"32d8d872-9540-452f-9d00-5b03eba978ea\" (UID: \"32d8d872-9540-452f-9d00-5b03eba978ea\") " Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.404418 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32d8d872-9540-452f-9d00-5b03eba978ea-utilities\") pod \"32d8d872-9540-452f-9d00-5b03eba978ea\" (UID: \"32d8d872-9540-452f-9d00-5b03eba978ea\") " Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.405431 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32d8d872-9540-452f-9d00-5b03eba978ea-utilities" (OuterVolumeSpecName: "utilities") pod "32d8d872-9540-452f-9d00-5b03eba978ea" (UID: "32d8d872-9540-452f-9d00-5b03eba978ea"). InnerVolumeSpecName "utilities". 
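
[annotation] The SyncLoop DELETE above is honored by kuberuntime_container.go:808, which kills registry-server with gracePeriod=2: the runtime delivers SIGTERM, waits up to the grace period for the process to exit, then SIGKILLs it. A plain-POSIX sketch of that contract (illustration only, not the CRI API; note the machine-config-daemon later in this log gets a much longer gracePeriod=600):

```python
import os, signal, time

def kill_with_grace(pid: int, grace_seconds: float) -> None:
    """SIGTERM first; if the process survives the grace period, SIGKILL it."""
    os.kill(pid, signal.SIGTERM)
    deadline = time.monotonic() + grace_seconds
    while time.monotonic() < deadline:
        try:
            os.kill(pid, 0)          # signal 0 = existence check only
        except ProcessLookupError:
            return                   # exited within the grace period
        time.sleep(0.1)
    os.kill(pid, signal.SIGKILL)     # grace period exhausted
```
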
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.409340 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32d8d872-9540-452f-9d00-5b03eba978ea-kube-api-access-nw9rr" (OuterVolumeSpecName: "kube-api-access-nw9rr") pod "32d8d872-9540-452f-9d00-5b03eba978ea" (UID: "32d8d872-9540-452f-9d00-5b03eba978ea"). InnerVolumeSpecName "kube-api-access-nw9rr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.454362 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32d8d872-9540-452f-9d00-5b03eba978ea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "32d8d872-9540-452f-9d00-5b03eba978ea" (UID: "32d8d872-9540-452f-9d00-5b03eba978ea"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.507205 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nw9rr\" (UniqueName: \"kubernetes.io/projected/32d8d872-9540-452f-9d00-5b03eba978ea-kube-api-access-nw9rr\") on node \"crc\" DevicePath \"\"" Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.507246 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32d8d872-9540-452f-9d00-5b03eba978ea-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.507257 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32d8d872-9540-452f-9d00-5b03eba978ea-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.841858 4799 generic.go:334] "Generic (PLEG): container finished" podID="32d8d872-9540-452f-9d00-5b03eba978ea" containerID="88afd7e190bdec951e29307899fda64fb3ad57da1434bb2765b634e9f6285cba" exitCode=0 Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.841914 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xf7hw" event={"ID":"32d8d872-9540-452f-9d00-5b03eba978ea","Type":"ContainerDied","Data":"88afd7e190bdec951e29307899fda64fb3ad57da1434bb2765b634e9f6285cba"} Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.841936 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xf7hw" Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.841955 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xf7hw" event={"ID":"32d8d872-9540-452f-9d00-5b03eba978ea","Type":"ContainerDied","Data":"0123ca24feea18d145af5a3365b5570bafc1a07e90f7020071e23cb4b4de1246"} Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.841987 4799 scope.go:117] "RemoveContainer" containerID="88afd7e190bdec951e29307899fda64fb3ad57da1434bb2765b634e9f6285cba" Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.874611 4799 scope.go:117] "RemoveContainer" containerID="8eb0c7ae1eee8acc30c1eb10b1d1d95fb9320ff13fb32588d7b5210a6a0680c1" Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.891280 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xf7hw"] Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.900548 4799 scope.go:117] "RemoveContainer" containerID="6812085b9f6dff86731662eed4e94e56b6999a60f7dbb159f850a3d1c3cae220" Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.908667 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xf7hw"] Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.952615 4799 scope.go:117] "RemoveContainer" containerID="88afd7e190bdec951e29307899fda64fb3ad57da1434bb2765b634e9f6285cba" Jan 21 18:09:17 crc kubenswrapper[4799]: E0121 18:09:17.953054 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88afd7e190bdec951e29307899fda64fb3ad57da1434bb2765b634e9f6285cba\": container with ID starting with 88afd7e190bdec951e29307899fda64fb3ad57da1434bb2765b634e9f6285cba not found: ID does not exist" containerID="88afd7e190bdec951e29307899fda64fb3ad57da1434bb2765b634e9f6285cba" Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.953098 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88afd7e190bdec951e29307899fda64fb3ad57da1434bb2765b634e9f6285cba"} err="failed to get container status \"88afd7e190bdec951e29307899fda64fb3ad57da1434bb2765b634e9f6285cba\": rpc error: code = NotFound desc = could not find container \"88afd7e190bdec951e29307899fda64fb3ad57da1434bb2765b634e9f6285cba\": container with ID starting with 88afd7e190bdec951e29307899fda64fb3ad57da1434bb2765b634e9f6285cba not found: ID does not exist" Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.953183 4799 scope.go:117] "RemoveContainer" containerID="8eb0c7ae1eee8acc30c1eb10b1d1d95fb9320ff13fb32588d7b5210a6a0680c1" Jan 21 18:09:17 crc kubenswrapper[4799]: E0121 18:09:17.953514 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8eb0c7ae1eee8acc30c1eb10b1d1d95fb9320ff13fb32588d7b5210a6a0680c1\": container with ID starting with 8eb0c7ae1eee8acc30c1eb10b1d1d95fb9320ff13fb32588d7b5210a6a0680c1 not found: ID does not exist" containerID="8eb0c7ae1eee8acc30c1eb10b1d1d95fb9320ff13fb32588d7b5210a6a0680c1" Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.953548 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8eb0c7ae1eee8acc30c1eb10b1d1d95fb9320ff13fb32588d7b5210a6a0680c1"} err="failed to get container status \"8eb0c7ae1eee8acc30c1eb10b1d1d95fb9320ff13fb32588d7b5210a6a0680c1\": rpc error: code = NotFound desc = could not find 
container \"8eb0c7ae1eee8acc30c1eb10b1d1d95fb9320ff13fb32588d7b5210a6a0680c1\": container with ID starting with 8eb0c7ae1eee8acc30c1eb10b1d1d95fb9320ff13fb32588d7b5210a6a0680c1 not found: ID does not exist" Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.953568 4799 scope.go:117] "RemoveContainer" containerID="6812085b9f6dff86731662eed4e94e56b6999a60f7dbb159f850a3d1c3cae220" Jan 21 18:09:17 crc kubenswrapper[4799]: E0121 18:09:17.953909 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6812085b9f6dff86731662eed4e94e56b6999a60f7dbb159f850a3d1c3cae220\": container with ID starting with 6812085b9f6dff86731662eed4e94e56b6999a60f7dbb159f850a3d1c3cae220 not found: ID does not exist" containerID="6812085b9f6dff86731662eed4e94e56b6999a60f7dbb159f850a3d1c3cae220" Jan 21 18:09:17 crc kubenswrapper[4799]: I0121 18:09:17.954005 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6812085b9f6dff86731662eed4e94e56b6999a60f7dbb159f850a3d1c3cae220"} err="failed to get container status \"6812085b9f6dff86731662eed4e94e56b6999a60f7dbb159f850a3d1c3cae220\": rpc error: code = NotFound desc = could not find container \"6812085b9f6dff86731662eed4e94e56b6999a60f7dbb159f850a3d1c3cae220\": container with ID starting with 6812085b9f6dff86731662eed4e94e56b6999a60f7dbb159f850a3d1c3cae220 not found: ID does not exist" Jan 21 18:09:18 crc kubenswrapper[4799]: I0121 18:09:18.219830 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32d8d872-9540-452f-9d00-5b03eba978ea" path="/var/lib/kubelet/pods/32d8d872-9540-452f-9d00-5b03eba978ea/volumes" Jan 21 18:09:25 crc kubenswrapper[4799]: I0121 18:09:25.971619 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:09:25 crc kubenswrapper[4799]: I0121 18:09:25.972533 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:09:52 crc kubenswrapper[4799]: I0121 18:09:52.296332 4799 generic.go:334] "Generic (PLEG): container finished" podID="f5e75302-b14f-4281-93e3-a40bff4bcafa" containerID="5cdd18592f3dadc161fc1f20dee65d04fb58f764b74922b4f59cc45c477c56ea" exitCode=0 Jan 21 18:09:52 crc kubenswrapper[4799]: I0121 18:09:52.296493 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" event={"ID":"f5e75302-b14f-4281-93e3-a40bff4bcafa","Type":"ContainerDied","Data":"5cdd18592f3dadc161fc1f20dee65d04fb58f764b74922b4f59cc45c477c56ea"} Jan 21 18:09:53 crc kubenswrapper[4799]: I0121 18:09:53.772686 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:09:53 crc kubenswrapper[4799]: I0121 18:09:53.946684 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkpr8\" (UniqueName: \"kubernetes.io/projected/f5e75302-b14f-4281-93e3-a40bff4bcafa-kube-api-access-zkpr8\") pod \"f5e75302-b14f-4281-93e3-a40bff4bcafa\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " Jan 21 18:09:53 crc kubenswrapper[4799]: I0121 18:09:53.946791 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-neutron-metadata-combined-ca-bundle\") pod \"f5e75302-b14f-4281-93e3-a40bff4bcafa\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " Jan 21 18:09:53 crc kubenswrapper[4799]: I0121 18:09:53.946878 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-inventory\") pod \"f5e75302-b14f-4281-93e3-a40bff4bcafa\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " Jan 21 18:09:53 crc kubenswrapper[4799]: I0121 18:09:53.947026 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-ssh-key-openstack-edpm-ipam\") pod \"f5e75302-b14f-4281-93e3-a40bff4bcafa\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " Jan 21 18:09:53 crc kubenswrapper[4799]: I0121 18:09:53.947089 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-nova-metadata-neutron-config-0\") pod \"f5e75302-b14f-4281-93e3-a40bff4bcafa\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " Jan 21 18:09:53 crc kubenswrapper[4799]: I0121 18:09:53.947222 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-neutron-ovn-metadata-agent-neutron-config-0\") pod \"f5e75302-b14f-4281-93e3-a40bff4bcafa\" (UID: \"f5e75302-b14f-4281-93e3-a40bff4bcafa\") " Jan 21 18:09:53 crc kubenswrapper[4799]: I0121 18:09:53.953858 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5e75302-b14f-4281-93e3-a40bff4bcafa-kube-api-access-zkpr8" (OuterVolumeSpecName: "kube-api-access-zkpr8") pod "f5e75302-b14f-4281-93e3-a40bff4bcafa" (UID: "f5e75302-b14f-4281-93e3-a40bff4bcafa"). InnerVolumeSpecName "kube-api-access-zkpr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:09:53 crc kubenswrapper[4799]: I0121 18:09:53.953988 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "f5e75302-b14f-4281-93e3-a40bff4bcafa" (UID: "f5e75302-b14f-4281-93e3-a40bff4bcafa"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:09:53 crc kubenswrapper[4799]: I0121 18:09:53.982338 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "f5e75302-b14f-4281-93e3-a40bff4bcafa" (UID: "f5e75302-b14f-4281-93e3-a40bff4bcafa"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:09:53 crc kubenswrapper[4799]: I0121 18:09:53.992068 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "f5e75302-b14f-4281-93e3-a40bff4bcafa" (UID: "f5e75302-b14f-4281-93e3-a40bff4bcafa"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:09:53 crc kubenswrapper[4799]: I0121 18:09:53.993756 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "f5e75302-b14f-4281-93e3-a40bff4bcafa" (UID: "f5e75302-b14f-4281-93e3-a40bff4bcafa"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:09:53 crc kubenswrapper[4799]: I0121 18:09:53.998612 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-inventory" (OuterVolumeSpecName: "inventory") pod "f5e75302-b14f-4281-93e3-a40bff4bcafa" (UID: "f5e75302-b14f-4281-93e3-a40bff4bcafa"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.050194 4799 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.050228 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-inventory\") on node \"crc\" DevicePath \"\"" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.050243 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.050255 4799 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.050270 4799 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f5e75302-b14f-4281-93e3-a40bff4bcafa-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.050286 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkpr8\" (UniqueName: \"kubernetes.io/projected/f5e75302-b14f-4281-93e3-a40bff4bcafa-kube-api-access-zkpr8\") on node \"crc\" DevicePath \"\"" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.324341 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" event={"ID":"f5e75302-b14f-4281-93e3-a40bff4bcafa","Type":"ContainerDied","Data":"67be31e4da2355af428b4b0e4b9495d53773beb6125cabdb382f87b307f5a0c0"} Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.324739 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67be31e4da2355af428b4b0e4b9495d53773beb6125cabdb382f87b307f5a0c0" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.324511 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.470174 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx"] Jan 21 18:09:54 crc kubenswrapper[4799]: E0121 18:09:54.471013 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32d8d872-9540-452f-9d00-5b03eba978ea" containerName="extract-content" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.471037 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="32d8d872-9540-452f-9d00-5b03eba978ea" containerName="extract-content" Jan 21 18:09:54 crc kubenswrapper[4799]: E0121 18:09:54.471058 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5e75302-b14f-4281-93e3-a40bff4bcafa" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.471072 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5e75302-b14f-4281-93e3-a40bff4bcafa" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Jan 21 18:09:54 crc kubenswrapper[4799]: E0121 18:09:54.471115 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32d8d872-9540-452f-9d00-5b03eba978ea" containerName="registry-server" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.471228 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="32d8d872-9540-452f-9d00-5b03eba978ea" containerName="registry-server" Jan 21 18:09:54 crc kubenswrapper[4799]: E0121 18:09:54.471255 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32d8d872-9540-452f-9d00-5b03eba978ea" containerName="extract-utilities" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.471267 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="32d8d872-9540-452f-9d00-5b03eba978ea" containerName="extract-utilities" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.471648 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="32d8d872-9540-452f-9d00-5b03eba978ea" containerName="registry-server" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.471670 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5e75302-b14f-4281-93e3-a40bff4bcafa" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.473056 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.477504 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.477578 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p22hr" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.477789 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.477962 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.478146 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.486616 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx"] Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.564463 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.564743 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.564791 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.564924 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.565114 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qk87h\" (UniqueName: \"kubernetes.io/projected/34ff08b0-f833-4c42-90a7-68af92ba7ce8-kube-api-access-qk87h\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.666144 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.666250 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.666329 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.666406 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qk87h\" (UniqueName: \"kubernetes.io/projected/34ff08b0-f833-4c42-90a7-68af92ba7ce8-kube-api-access-qk87h\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.666451 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.673479 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.673666 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.676759 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.677641 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" 
(UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.686976 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qk87h\" (UniqueName: \"kubernetes.io/projected/34ff08b0-f833-4c42-90a7-68af92ba7ce8-kube-api-access-qk87h\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:54 crc kubenswrapper[4799]: I0121 18:09:54.803093 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:09:55 crc kubenswrapper[4799]: I0121 18:09:55.387730 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx"] Jan 21 18:09:55 crc kubenswrapper[4799]: I0121 18:09:55.394428 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:09:55 crc kubenswrapper[4799]: I0121 18:09:55.971537 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:09:55 crc kubenswrapper[4799]: I0121 18:09:55.971642 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:09:55 crc kubenswrapper[4799]: I0121 18:09:55.971724 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 18:09:55 crc kubenswrapper[4799]: I0121 18:09:55.972991 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"92f5a81a66f0edf29639d96ce3dd3c3aaf830d422f1a0c852d1348a026575cf5"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:09:55 crc kubenswrapper[4799]: I0121 18:09:55.973121 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://92f5a81a66f0edf29639d96ce3dd3c3aaf830d422f1a0c852d1348a026575cf5" gracePeriod=600 Jan 21 18:09:56 crc kubenswrapper[4799]: I0121 18:09:56.349258 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" event={"ID":"34ff08b0-f833-4c42-90a7-68af92ba7ce8","Type":"ContainerStarted","Data":"eee0d93e6395a9a802d1a72b6fb6b68a2d5a9eecbe8869bd9396b32a6892c1e5"} Jan 21 18:09:56 crc kubenswrapper[4799]: I0121 18:09:56.349592 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" event={"ID":"34ff08b0-f833-4c42-90a7-68af92ba7ce8","Type":"ContainerStarted","Data":"60508bea15b864f1fc7fd50daf304acc36373c3cd79c89c60cc82f725bcb75c6"} Jan 21 18:09:56 crc kubenswrapper[4799]: I0121 18:09:56.351856 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="92f5a81a66f0edf29639d96ce3dd3c3aaf830d422f1a0c852d1348a026575cf5" exitCode=0 Jan 21 18:09:56 crc kubenswrapper[4799]: I0121 18:09:56.351889 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"92f5a81a66f0edf29639d96ce3dd3c3aaf830d422f1a0c852d1348a026575cf5"} Jan 21 18:09:56 crc kubenswrapper[4799]: I0121 18:09:56.351908 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b"} Jan 21 18:09:56 crc kubenswrapper[4799]: I0121 18:09:56.351924 4799 scope.go:117] "RemoveContainer" containerID="5c678dbd43521e1ebec424aa75b79e48ba8e3d74d5fafd08337fc6d5cb8d2861" Jan 21 18:09:56 crc kubenswrapper[4799]: I0121 18:09:56.377415 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" podStartSLOduration=1.9184331110000001 podStartE2EDuration="2.377392403s" podCreationTimestamp="2026-01-21 18:09:54 +0000 UTC" firstStartedPulling="2026-01-21 18:09:55.394105646 +0000 UTC m=+2222.020395669" lastFinishedPulling="2026-01-21 18:09:55.853064898 +0000 UTC m=+2222.479354961" observedRunningTime="2026-01-21 18:09:56.373850184 +0000 UTC m=+2223.000140217" watchObservedRunningTime="2026-01-21 18:09:56.377392403 +0000 UTC m=+2223.003682426" Jan 21 18:11:26 crc kubenswrapper[4799]: I0121 18:11:26.765167 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8g9s8"] Jan 21 18:11:26 crc kubenswrapper[4799]: I0121 18:11:26.769405 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8g9s8" Jan 21 18:11:26 crc kubenswrapper[4799]: I0121 18:11:26.789873 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8g9s8"] Jan 21 18:11:26 crc kubenswrapper[4799]: I0121 18:11:26.802438 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae0802c3-c05a-4612-8100-2234d3a079ed-catalog-content\") pod \"community-operators-8g9s8\" (UID: \"ae0802c3-c05a-4612-8100-2234d3a079ed\") " pod="openshift-marketplace/community-operators-8g9s8" Jan 21 18:11:26 crc kubenswrapper[4799]: I0121 18:11:26.802744 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9mjx\" (UniqueName: \"kubernetes.io/projected/ae0802c3-c05a-4612-8100-2234d3a079ed-kube-api-access-n9mjx\") pod \"community-operators-8g9s8\" (UID: \"ae0802c3-c05a-4612-8100-2234d3a079ed\") " pod="openshift-marketplace/community-operators-8g9s8" Jan 21 18:11:26 crc kubenswrapper[4799]: I0121 18:11:26.802792 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae0802c3-c05a-4612-8100-2234d3a079ed-utilities\") pod \"community-operators-8g9s8\" (UID: \"ae0802c3-c05a-4612-8100-2234d3a079ed\") " pod="openshift-marketplace/community-operators-8g9s8" Jan 21 18:11:26 crc kubenswrapper[4799]: I0121 18:11:26.905754 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9mjx\" (UniqueName: \"kubernetes.io/projected/ae0802c3-c05a-4612-8100-2234d3a079ed-kube-api-access-n9mjx\") pod \"community-operators-8g9s8\" (UID: \"ae0802c3-c05a-4612-8100-2234d3a079ed\") " pod="openshift-marketplace/community-operators-8g9s8" Jan 21 18:11:26 crc kubenswrapper[4799]: I0121 18:11:26.905816 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae0802c3-c05a-4612-8100-2234d3a079ed-utilities\") pod \"community-operators-8g9s8\" (UID: \"ae0802c3-c05a-4612-8100-2234d3a079ed\") " pod="openshift-marketplace/community-operators-8g9s8" Jan 21 18:11:26 crc kubenswrapper[4799]: I0121 18:11:26.905907 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae0802c3-c05a-4612-8100-2234d3a079ed-catalog-content\") pod \"community-operators-8g9s8\" (UID: \"ae0802c3-c05a-4612-8100-2234d3a079ed\") " pod="openshift-marketplace/community-operators-8g9s8" Jan 21 18:11:26 crc kubenswrapper[4799]: I0121 18:11:26.906455 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae0802c3-c05a-4612-8100-2234d3a079ed-utilities\") pod \"community-operators-8g9s8\" (UID: \"ae0802c3-c05a-4612-8100-2234d3a079ed\") " pod="openshift-marketplace/community-operators-8g9s8" Jan 21 18:11:26 crc kubenswrapper[4799]: I0121 18:11:26.906568 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae0802c3-c05a-4612-8100-2234d3a079ed-catalog-content\") pod \"community-operators-8g9s8\" (UID: \"ae0802c3-c05a-4612-8100-2234d3a079ed\") " pod="openshift-marketplace/community-operators-8g9s8" Jan 21 18:11:26 crc kubenswrapper[4799]: I0121 18:11:26.932261 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-n9mjx\" (UniqueName: \"kubernetes.io/projected/ae0802c3-c05a-4612-8100-2234d3a079ed-kube-api-access-n9mjx\") pod \"community-operators-8g9s8\" (UID: \"ae0802c3-c05a-4612-8100-2234d3a079ed\") " pod="openshift-marketplace/community-operators-8g9s8" Jan 21 18:11:27 crc kubenswrapper[4799]: I0121 18:11:27.096987 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8g9s8" Jan 21 18:11:27 crc kubenswrapper[4799]: I0121 18:11:27.687878 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8g9s8"] Jan 21 18:11:28 crc kubenswrapper[4799]: I0121 18:11:28.413313 4799 generic.go:334] "Generic (PLEG): container finished" podID="ae0802c3-c05a-4612-8100-2234d3a079ed" containerID="4304592b2f1500c86e6774e4362d2c2ae0afd2a77af71816e135adcac76bc7a1" exitCode=0 Jan 21 18:11:28 crc kubenswrapper[4799]: I0121 18:11:28.413407 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8g9s8" event={"ID":"ae0802c3-c05a-4612-8100-2234d3a079ed","Type":"ContainerDied","Data":"4304592b2f1500c86e6774e4362d2c2ae0afd2a77af71816e135adcac76bc7a1"} Jan 21 18:11:28 crc kubenswrapper[4799]: I0121 18:11:28.413581 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8g9s8" event={"ID":"ae0802c3-c05a-4612-8100-2234d3a079ed","Type":"ContainerStarted","Data":"4e642e6b33aaffa161449106f0924acf754fd86265d8d8fe8a128da83452f002"} Jan 21 18:11:30 crc kubenswrapper[4799]: I0121 18:11:30.433121 4799 generic.go:334] "Generic (PLEG): container finished" podID="ae0802c3-c05a-4612-8100-2234d3a079ed" containerID="888fa5ac112001d789f4a1d330fa9bc5f6c747174b068cb9de5f6805f32912d7" exitCode=0 Jan 21 18:11:30 crc kubenswrapper[4799]: I0121 18:11:30.433420 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8g9s8" event={"ID":"ae0802c3-c05a-4612-8100-2234d3a079ed","Type":"ContainerDied","Data":"888fa5ac112001d789f4a1d330fa9bc5f6c747174b068cb9de5f6805f32912d7"} Jan 21 18:11:31 crc kubenswrapper[4799]: I0121 18:11:31.445893 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8g9s8" event={"ID":"ae0802c3-c05a-4612-8100-2234d3a079ed","Type":"ContainerStarted","Data":"fc48f681ae04bd1a3bb8a32f27aba3a09b91bc81e0ca1c51ec28d63f778c94d2"} Jan 21 18:11:31 crc kubenswrapper[4799]: I0121 18:11:31.482297 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8g9s8" podStartSLOduration=3.047085193 podStartE2EDuration="5.482275427s" podCreationTimestamp="2026-01-21 18:11:26 +0000 UTC" firstStartedPulling="2026-01-21 18:11:28.416180016 +0000 UTC m=+2315.042470049" lastFinishedPulling="2026-01-21 18:11:30.85137025 +0000 UTC m=+2317.477660283" observedRunningTime="2026-01-21 18:11:31.472475884 +0000 UTC m=+2318.098765937" watchObservedRunningTime="2026-01-21 18:11:31.482275427 +0000 UTC m=+2318.108565460" Jan 21 18:11:37 crc kubenswrapper[4799]: I0121 18:11:37.097724 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8g9s8" Jan 21 18:11:37 crc kubenswrapper[4799]: I0121 18:11:37.098392 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8g9s8" Jan 21 18:11:37 crc kubenswrapper[4799]: I0121 18:11:37.154523 4799 
Jan 21 18:11:37 crc kubenswrapper[4799]: I0121 18:11:37.154523 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8g9s8"
Jan 21 18:11:37 crc kubenswrapper[4799]: I0121 18:11:37.573819 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8g9s8"
Jan 21 18:11:37 crc kubenswrapper[4799]: I0121 18:11:37.622052 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8g9s8"]
Jan 21 18:11:39 crc kubenswrapper[4799]: I0121 18:11:39.529739 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8g9s8" podUID="ae0802c3-c05a-4612-8100-2234d3a079ed" containerName="registry-server" containerID="cri-o://fc48f681ae04bd1a3bb8a32f27aba3a09b91bc81e0ca1c51ec28d63f778c94d2" gracePeriod=2
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.007409 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8g9s8"
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.204456 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae0802c3-c05a-4612-8100-2234d3a079ed-utilities\") pod \"ae0802c3-c05a-4612-8100-2234d3a079ed\" (UID: \"ae0802c3-c05a-4612-8100-2234d3a079ed\") "
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.205402 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9mjx\" (UniqueName: \"kubernetes.io/projected/ae0802c3-c05a-4612-8100-2234d3a079ed-kube-api-access-n9mjx\") pod \"ae0802c3-c05a-4612-8100-2234d3a079ed\" (UID: \"ae0802c3-c05a-4612-8100-2234d3a079ed\") "
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.205490 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae0802c3-c05a-4612-8100-2234d3a079ed-catalog-content\") pod \"ae0802c3-c05a-4612-8100-2234d3a079ed\" (UID: \"ae0802c3-c05a-4612-8100-2234d3a079ed\") "
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.205563 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae0802c3-c05a-4612-8100-2234d3a079ed-utilities" (OuterVolumeSpecName: "utilities") pod "ae0802c3-c05a-4612-8100-2234d3a079ed" (UID: "ae0802c3-c05a-4612-8100-2234d3a079ed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.206727 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae0802c3-c05a-4612-8100-2234d3a079ed-utilities\") on node \"crc\" DevicePath \"\""
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.214498 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae0802c3-c05a-4612-8100-2234d3a079ed-kube-api-access-n9mjx" (OuterVolumeSpecName: "kube-api-access-n9mjx") pod "ae0802c3-c05a-4612-8100-2234d3a079ed" (UID: "ae0802c3-c05a-4612-8100-2234d3a079ed"). InnerVolumeSpecName "kube-api-access-n9mjx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.259372 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae0802c3-c05a-4612-8100-2234d3a079ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ae0802c3-c05a-4612-8100-2234d3a079ed" (UID: "ae0802c3-c05a-4612-8100-2234d3a079ed"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.309223 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9mjx\" (UniqueName: \"kubernetes.io/projected/ae0802c3-c05a-4612-8100-2234d3a079ed-kube-api-access-n9mjx\") on node \"crc\" DevicePath \"\""
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.309446 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae0802c3-c05a-4612-8100-2234d3a079ed-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.545032 4799 generic.go:334] "Generic (PLEG): container finished" podID="ae0802c3-c05a-4612-8100-2234d3a079ed" containerID="fc48f681ae04bd1a3bb8a32f27aba3a09b91bc81e0ca1c51ec28d63f778c94d2" exitCode=0
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.545100 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8g9s8" event={"ID":"ae0802c3-c05a-4612-8100-2234d3a079ed","Type":"ContainerDied","Data":"fc48f681ae04bd1a3bb8a32f27aba3a09b91bc81e0ca1c51ec28d63f778c94d2"}
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.545178 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8g9s8" event={"ID":"ae0802c3-c05a-4612-8100-2234d3a079ed","Type":"ContainerDied","Data":"4e642e6b33aaffa161449106f0924acf754fd86265d8d8fe8a128da83452f002"}
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.545218 4799 scope.go:117] "RemoveContainer" containerID="fc48f681ae04bd1a3bb8a32f27aba3a09b91bc81e0ca1c51ec28d63f778c94d2"
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.546538 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8g9s8"
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.568557 4799 scope.go:117] "RemoveContainer" containerID="888fa5ac112001d789f4a1d330fa9bc5f6c747174b068cb9de5f6805f32912d7"
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.594013 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8g9s8"]
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.603000 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8g9s8"]
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.619142 4799 scope.go:117] "RemoveContainer" containerID="4304592b2f1500c86e6774e4362d2c2ae0afd2a77af71816e135adcac76bc7a1"
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.652246 4799 scope.go:117] "RemoveContainer" containerID="fc48f681ae04bd1a3bb8a32f27aba3a09b91bc81e0ca1c51ec28d63f778c94d2"
Jan 21 18:11:40 crc kubenswrapper[4799]: E0121 18:11:40.652837 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc48f681ae04bd1a3bb8a32f27aba3a09b91bc81e0ca1c51ec28d63f778c94d2\": container with ID starting with fc48f681ae04bd1a3bb8a32f27aba3a09b91bc81e0ca1c51ec28d63f778c94d2 not found: ID does not exist" containerID="fc48f681ae04bd1a3bb8a32f27aba3a09b91bc81e0ca1c51ec28d63f778c94d2"
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.652896 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc48f681ae04bd1a3bb8a32f27aba3a09b91bc81e0ca1c51ec28d63f778c94d2"} err="failed to get container status \"fc48f681ae04bd1a3bb8a32f27aba3a09b91bc81e0ca1c51ec28d63f778c94d2\": rpc error: code = NotFound desc = could not find container \"fc48f681ae04bd1a3bb8a32f27aba3a09b91bc81e0ca1c51ec28d63f778c94d2\": container with ID starting with fc48f681ae04bd1a3bb8a32f27aba3a09b91bc81e0ca1c51ec28d63f778c94d2 not found: ID does not exist"
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.652937 4799 scope.go:117] "RemoveContainer" containerID="888fa5ac112001d789f4a1d330fa9bc5f6c747174b068cb9de5f6805f32912d7"
Jan 21 18:11:40 crc kubenswrapper[4799]: E0121 18:11:40.653330 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"888fa5ac112001d789f4a1d330fa9bc5f6c747174b068cb9de5f6805f32912d7\": container with ID starting with 888fa5ac112001d789f4a1d330fa9bc5f6c747174b068cb9de5f6805f32912d7 not found: ID does not exist" containerID="888fa5ac112001d789f4a1d330fa9bc5f6c747174b068cb9de5f6805f32912d7"
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.653366 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"888fa5ac112001d789f4a1d330fa9bc5f6c747174b068cb9de5f6805f32912d7"} err="failed to get container status \"888fa5ac112001d789f4a1d330fa9bc5f6c747174b068cb9de5f6805f32912d7\": rpc error: code = NotFound desc = could not find container \"888fa5ac112001d789f4a1d330fa9bc5f6c747174b068cb9de5f6805f32912d7\": container with ID starting with 888fa5ac112001d789f4a1d330fa9bc5f6c747174b068cb9de5f6805f32912d7 not found: ID does not exist"
Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.653386 4799 scope.go:117] "RemoveContainer" containerID="4304592b2f1500c86e6774e4362d2c2ae0afd2a77af71816e135adcac76bc7a1"
failed" err="rpc error: code = NotFound desc = could not find container \"4304592b2f1500c86e6774e4362d2c2ae0afd2a77af71816e135adcac76bc7a1\": container with ID starting with 4304592b2f1500c86e6774e4362d2c2ae0afd2a77af71816e135adcac76bc7a1 not found: ID does not exist" containerID="4304592b2f1500c86e6774e4362d2c2ae0afd2a77af71816e135adcac76bc7a1" Jan 21 18:11:40 crc kubenswrapper[4799]: I0121 18:11:40.653707 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4304592b2f1500c86e6774e4362d2c2ae0afd2a77af71816e135adcac76bc7a1"} err="failed to get container status \"4304592b2f1500c86e6774e4362d2c2ae0afd2a77af71816e135adcac76bc7a1\": rpc error: code = NotFound desc = could not find container \"4304592b2f1500c86e6774e4362d2c2ae0afd2a77af71816e135adcac76bc7a1\": container with ID starting with 4304592b2f1500c86e6774e4362d2c2ae0afd2a77af71816e135adcac76bc7a1 not found: ID does not exist" Jan 21 18:11:42 crc kubenswrapper[4799]: I0121 18:11:42.223909 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae0802c3-c05a-4612-8100-2234d3a079ed" path="/var/lib/kubelet/pods/ae0802c3-c05a-4612-8100-2234d3a079ed/volumes" Jan 21 18:12:25 crc kubenswrapper[4799]: I0121 18:12:25.970519 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:12:25 crc kubenswrapper[4799]: I0121 18:12:25.971212 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:12:56 crc kubenswrapper[4799]: I0121 18:12:56.099185 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:12:56 crc kubenswrapper[4799]: I0121 18:12:56.099616 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:13:25 crc kubenswrapper[4799]: I0121 18:13:25.970689 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:13:25 crc kubenswrapper[4799]: I0121 18:13:25.971386 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:13:25 crc kubenswrapper[4799]: I0121 18:13:25.971470 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 18:13:25 crc kubenswrapper[4799]: I0121 18:13:25.972375 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:13:25 crc kubenswrapper[4799]: I0121 18:13:25.972466 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" gracePeriod=600 Jan 21 18:13:26 crc kubenswrapper[4799]: E0121 18:13:26.096963 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:13:26 crc kubenswrapper[4799]: I0121 18:13:26.105322 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" exitCode=0 Jan 21 18:13:26 crc kubenswrapper[4799]: I0121 18:13:26.105406 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b"} Jan 21 18:13:26 crc kubenswrapper[4799]: I0121 18:13:26.105508 4799 scope.go:117] "RemoveContainer" containerID="92f5a81a66f0edf29639d96ce3dd3c3aaf830d422f1a0c852d1348a026575cf5" Jan 21 18:13:26 crc kubenswrapper[4799]: I0121 18:13:26.106420 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:13:26 crc kubenswrapper[4799]: E0121 18:13:26.106778 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:13:40 crc kubenswrapper[4799]: I0121 18:13:40.206035 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:13:40 crc kubenswrapper[4799]: E0121 18:13:40.206989 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:13:52 crc 
Jan 21 18:13:26 crc kubenswrapper[4799]: E0121 18:13:26.096963 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 18:13:26 crc kubenswrapper[4799]: I0121 18:13:26.105322 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" exitCode=0
Jan 21 18:13:26 crc kubenswrapper[4799]: I0121 18:13:26.105406 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b"}
Jan 21 18:13:26 crc kubenswrapper[4799]: I0121 18:13:26.105508 4799 scope.go:117] "RemoveContainer" containerID="92f5a81a66f0edf29639d96ce3dd3c3aaf830d422f1a0c852d1348a026575cf5"
Jan 21 18:13:26 crc kubenswrapper[4799]: I0121 18:13:26.106420 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b"
Jan 21 18:13:26 crc kubenswrapper[4799]: E0121 18:13:26.106778 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 18:13:40 crc kubenswrapper[4799]: I0121 18:13:40.206035 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b"
Jan 21 18:13:40 crc kubenswrapper[4799]: E0121 18:13:40.206989 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 18:13:52 crc kubenswrapper[4799]: I0121 18:13:52.205492 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b"
Jan 21 18:13:52 crc kubenswrapper[4799]: E0121 18:13:52.206378 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 18:14:04 crc kubenswrapper[4799]: I0121 18:14:04.211984 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b"
Jan 21 18:14:04 crc kubenswrapper[4799]: E0121 18:14:04.212847 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 18:14:19 crc kubenswrapper[4799]: I0121 18:14:19.205569 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b"
Jan 21 18:14:19 crc kubenswrapper[4799]: E0121 18:14:19.206627 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
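The RemoveContainer / "Error syncing pod" pairs repeat every sync attempt because the restart back-off window for this container is still open; the window doubles per crash up to the cap quoted in the message ("back-off 5m0s"), which this container has already reached. A toy illustration of that doubling policy, assuming kubelet's long-standing defaults of a 10s initial delay and a 5m cap (kubelet tracks this per container; this sketch shows only the policy shape):

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	delay, maxDelay := 10*time.Second, 5*time.Minute
    	for crash := 1; crash <= 7; crash++ {
    		fmt.Printf("crash %d: next restart no sooner than %v\n", crash, delay)
    		delay *= 2
    		if delay > maxDelay {
    			delay = maxDelay // the "back-off 5m0s" plateau seen above
    		}
    	}
    }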
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:14:24 crc kubenswrapper[4799]: I0121 18:14:24.846028 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-ssh-key-openstack-edpm-ipam\") pod \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " Jan 21 18:14:24 crc kubenswrapper[4799]: I0121 18:14:24.846383 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-inventory\") pod \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " Jan 21 18:14:24 crc kubenswrapper[4799]: I0121 18:14:24.846487 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qk87h\" (UniqueName: \"kubernetes.io/projected/34ff08b0-f833-4c42-90a7-68af92ba7ce8-kube-api-access-qk87h\") pod \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " Jan 21 18:14:24 crc kubenswrapper[4799]: I0121 18:14:24.846514 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-libvirt-secret-0\") pod \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " Jan 21 18:14:24 crc kubenswrapper[4799]: I0121 18:14:24.846646 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-libvirt-combined-ca-bundle\") pod \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\" (UID: \"34ff08b0-f833-4c42-90a7-68af92ba7ce8\") " Jan 21 18:14:24 crc kubenswrapper[4799]: I0121 18:14:24.853948 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "34ff08b0-f833-4c42-90a7-68af92ba7ce8" (UID: "34ff08b0-f833-4c42-90a7-68af92ba7ce8"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:14:24 crc kubenswrapper[4799]: I0121 18:14:24.863339 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34ff08b0-f833-4c42-90a7-68af92ba7ce8-kube-api-access-qk87h" (OuterVolumeSpecName: "kube-api-access-qk87h") pod "34ff08b0-f833-4c42-90a7-68af92ba7ce8" (UID: "34ff08b0-f833-4c42-90a7-68af92ba7ce8"). InnerVolumeSpecName "kube-api-access-qk87h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:14:24 crc kubenswrapper[4799]: I0121 18:14:24.876964 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "34ff08b0-f833-4c42-90a7-68af92ba7ce8" (UID: "34ff08b0-f833-4c42-90a7-68af92ba7ce8"). InnerVolumeSpecName "libvirt-secret-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:14:24 crc kubenswrapper[4799]: I0121 18:14:24.881299 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-inventory" (OuterVolumeSpecName: "inventory") pod "34ff08b0-f833-4c42-90a7-68af92ba7ce8" (UID: "34ff08b0-f833-4c42-90a7-68af92ba7ce8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:14:24 crc kubenswrapper[4799]: I0121 18:14:24.882175 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "34ff08b0-f833-4c42-90a7-68af92ba7ce8" (UID: "34ff08b0-f833-4c42-90a7-68af92ba7ce8"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:14:24 crc kubenswrapper[4799]: I0121 18:14:24.949230 4799 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:24 crc kubenswrapper[4799]: I0121 18:14:24.949280 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:24 crc kubenswrapper[4799]: I0121 18:14:24.949294 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-inventory\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:24 crc kubenswrapper[4799]: I0121 18:14:24.949306 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qk87h\" (UniqueName: \"kubernetes.io/projected/34ff08b0-f833-4c42-90a7-68af92ba7ce8-kube-api-access-qk87h\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:24 crc kubenswrapper[4799]: I0121 18:14:24.949319 4799 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/34ff08b0-f833-4c42-90a7-68af92ba7ce8-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.123800 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" event={"ID":"34ff08b0-f833-4c42-90a7-68af92ba7ce8","Type":"ContainerDied","Data":"60508bea15b864f1fc7fd50daf304acc36373c3cd79c89c60cc82f725bcb75c6"} Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.123840 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60508bea15b864f1fc7fd50daf304acc36373c3cd79c89c60cc82f725bcb75c6" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.123847 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.234195 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc"] Jan 21 18:14:25 crc kubenswrapper[4799]: E0121 18:14:25.234742 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae0802c3-c05a-4612-8100-2234d3a079ed" containerName="registry-server" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.234774 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae0802c3-c05a-4612-8100-2234d3a079ed" containerName="registry-server" Jan 21 18:14:25 crc kubenswrapper[4799]: E0121 18:14:25.234802 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34ff08b0-f833-4c42-90a7-68af92ba7ce8" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.234814 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="34ff08b0-f833-4c42-90a7-68af92ba7ce8" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Jan 21 18:14:25 crc kubenswrapper[4799]: E0121 18:14:25.234841 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae0802c3-c05a-4612-8100-2234d3a079ed" containerName="extract-utilities" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.234849 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae0802c3-c05a-4612-8100-2234d3a079ed" containerName="extract-utilities" Jan 21 18:14:25 crc kubenswrapper[4799]: E0121 18:14:25.234870 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae0802c3-c05a-4612-8100-2234d3a079ed" containerName="extract-content" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.234878 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae0802c3-c05a-4612-8100-2234d3a079ed" containerName="extract-content" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.235151 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="34ff08b0-f833-4c42-90a7-68af92ba7ce8" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.235168 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae0802c3-c05a-4612-8100-2234d3a079ed" containerName="registry-server" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.236120 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.238836 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.239278 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.239323 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.239446 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p22hr" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.239509 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.239632 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.239683 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.245806 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc"] Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.360528 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.360644 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.361564 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw9vh\" (UniqueName: \"kubernetes.io/projected/9f3a320d-5663-4a3d-adb0-fa58906a7a91-kube-api-access-gw9vh\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.361670 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.361772 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.361858 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.362219 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.362259 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.362357 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.464116 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.464224 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gw9vh\" (UniqueName: \"kubernetes.io/projected/9f3a320d-5663-4a3d-adb0-fa58906a7a91-kube-api-access-gw9vh\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.464259 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.464291 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.464323 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.464384 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.464417 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.464464 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.467426 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.467630 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.470120 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.470226 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.470306 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.471140 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.471771 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.473854 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.478703 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.485053 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gw9vh\" (UniqueName: \"kubernetes.io/projected/9f3a320d-5663-4a3d-adb0-fa58906a7a91-kube-api-access-gw9vh\") pod \"nova-edpm-deployment-openstack-edpm-ipam-bs6xc\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:25 crc kubenswrapper[4799]: I0121 18:14:25.554575 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:14:26 crc kubenswrapper[4799]: I0121 18:14:26.118594 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc"] Jan 21 18:14:26 crc kubenswrapper[4799]: I0121 18:14:26.138248 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" event={"ID":"9f3a320d-5663-4a3d-adb0-fa58906a7a91","Type":"ContainerStarted","Data":"22bc30ad8411f1df61c52683cb7d06e980906e7e3ca4d056a3fdae6e2ad01175"} Jan 21 18:14:27 crc kubenswrapper[4799]: I0121 18:14:27.149743 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" event={"ID":"9f3a320d-5663-4a3d-adb0-fa58906a7a91","Type":"ContainerStarted","Data":"0260fa9e12c45d22a9d3ba8e46cc2ac63374e3243fd651b414485fcec7c66390"} Jan 21 18:14:27 crc kubenswrapper[4799]: I0121 18:14:27.182148 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" podStartSLOduration=1.666130619 podStartE2EDuration="2.18207391s" podCreationTimestamp="2026-01-21 18:14:25 +0000 UTC" firstStartedPulling="2026-01-21 18:14:26.128302485 +0000 UTC m=+2492.754592508" lastFinishedPulling="2026-01-21 18:14:26.644245776 +0000 UTC m=+2493.270535799" observedRunningTime="2026-01-21 18:14:27.174314463 +0000 UTC m=+2493.800604486" watchObservedRunningTime="2026-01-21 18:14:27.18207391 +0000 UTC m=+2493.808363933" Jan 21 18:14:30 crc kubenswrapper[4799]: I0121 18:14:30.205063 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:14:30 crc kubenswrapper[4799]: E0121 18:14:30.205906 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:14:43 crc kubenswrapper[4799]: I0121 18:14:43.206005 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:14:43 crc kubenswrapper[4799]: E0121 18:14:43.207273 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:14:58 crc kubenswrapper[4799]: I0121 18:14:58.205583 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:14:58 crc kubenswrapper[4799]: E0121 18:14:58.206657 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" 
podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:15:00 crc kubenswrapper[4799]: I0121 18:15:00.154417 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf"] Jan 21 18:15:00 crc kubenswrapper[4799]: I0121 18:15:00.157068 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" Jan 21 18:15:00 crc kubenswrapper[4799]: I0121 18:15:00.159912 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 21 18:15:00 crc kubenswrapper[4799]: I0121 18:15:00.160725 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbmcj\" (UniqueName: \"kubernetes.io/projected/c23b2345-2c6a-44dc-9cb6-a9a53981011c-kube-api-access-tbmcj\") pod \"collect-profiles-29483655-5h8wf\" (UID: \"c23b2345-2c6a-44dc-9cb6-a9a53981011c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" Jan 21 18:15:00 crc kubenswrapper[4799]: I0121 18:15:00.160880 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c23b2345-2c6a-44dc-9cb6-a9a53981011c-secret-volume\") pod \"collect-profiles-29483655-5h8wf\" (UID: \"c23b2345-2c6a-44dc-9cb6-a9a53981011c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" Jan 21 18:15:00 crc kubenswrapper[4799]: I0121 18:15:00.160965 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 21 18:15:00 crc kubenswrapper[4799]: I0121 18:15:00.161436 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c23b2345-2c6a-44dc-9cb6-a9a53981011c-config-volume\") pod \"collect-profiles-29483655-5h8wf\" (UID: \"c23b2345-2c6a-44dc-9cb6-a9a53981011c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" Jan 21 18:15:00 crc kubenswrapper[4799]: I0121 18:15:00.166552 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf"] Jan 21 18:15:00 crc kubenswrapper[4799]: I0121 18:15:00.263861 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c23b2345-2c6a-44dc-9cb6-a9a53981011c-config-volume\") pod \"collect-profiles-29483655-5h8wf\" (UID: \"c23b2345-2c6a-44dc-9cb6-a9a53981011c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" Jan 21 18:15:00 crc kubenswrapper[4799]: I0121 18:15:00.263969 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbmcj\" (UniqueName: \"kubernetes.io/projected/c23b2345-2c6a-44dc-9cb6-a9a53981011c-kube-api-access-tbmcj\") pod \"collect-profiles-29483655-5h8wf\" (UID: \"c23b2345-2c6a-44dc-9cb6-a9a53981011c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" Jan 21 18:15:00 crc kubenswrapper[4799]: I0121 18:15:00.264011 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c23b2345-2c6a-44dc-9cb6-a9a53981011c-secret-volume\") pod \"collect-profiles-29483655-5h8wf\" (UID: 
\"c23b2345-2c6a-44dc-9cb6-a9a53981011c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" Jan 21 18:15:00 crc kubenswrapper[4799]: I0121 18:15:00.265287 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c23b2345-2c6a-44dc-9cb6-a9a53981011c-config-volume\") pod \"collect-profiles-29483655-5h8wf\" (UID: \"c23b2345-2c6a-44dc-9cb6-a9a53981011c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" Jan 21 18:15:00 crc kubenswrapper[4799]: I0121 18:15:00.269840 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c23b2345-2c6a-44dc-9cb6-a9a53981011c-secret-volume\") pod \"collect-profiles-29483655-5h8wf\" (UID: \"c23b2345-2c6a-44dc-9cb6-a9a53981011c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" Jan 21 18:15:00 crc kubenswrapper[4799]: I0121 18:15:00.280632 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbmcj\" (UniqueName: \"kubernetes.io/projected/c23b2345-2c6a-44dc-9cb6-a9a53981011c-kube-api-access-tbmcj\") pod \"collect-profiles-29483655-5h8wf\" (UID: \"c23b2345-2c6a-44dc-9cb6-a9a53981011c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" Jan 21 18:15:00 crc kubenswrapper[4799]: I0121 18:15:00.491811 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" Jan 21 18:15:00 crc kubenswrapper[4799]: I0121 18:15:00.944358 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf"] Jan 21 18:15:01 crc kubenswrapper[4799]: I0121 18:15:01.581438 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" event={"ID":"c23b2345-2c6a-44dc-9cb6-a9a53981011c","Type":"ContainerStarted","Data":"7490a6b21ca06b5b208fdf5be82ebdd0189dd80ac532993b7537d8b4f958dda2"} Jan 21 18:15:01 crc kubenswrapper[4799]: I0121 18:15:01.581829 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" event={"ID":"c23b2345-2c6a-44dc-9cb6-a9a53981011c","Type":"ContainerStarted","Data":"24353e43fc20f25c78331da9156b949f084acfa9565fb3627b61b357ddadb37a"} Jan 21 18:15:01 crc kubenswrapper[4799]: I0121 18:15:01.620679 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" podStartSLOduration=1.62064743 podStartE2EDuration="1.62064743s" podCreationTimestamp="2026-01-21 18:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:15:01.610914638 +0000 UTC m=+2528.237204661" watchObservedRunningTime="2026-01-21 18:15:01.62064743 +0000 UTC m=+2528.246937453" Jan 21 18:15:02 crc kubenswrapper[4799]: I0121 18:15:02.596050 4799 generic.go:334] "Generic (PLEG): container finished" podID="c23b2345-2c6a-44dc-9cb6-a9a53981011c" containerID="7490a6b21ca06b5b208fdf5be82ebdd0189dd80ac532993b7537d8b4f958dda2" exitCode=0 Jan 21 18:15:02 crc kubenswrapper[4799]: I0121 18:15:02.596191 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" 
event={"ID":"c23b2345-2c6a-44dc-9cb6-a9a53981011c","Type":"ContainerDied","Data":"7490a6b21ca06b5b208fdf5be82ebdd0189dd80ac532993b7537d8b4f958dda2"} Jan 21 18:15:04 crc kubenswrapper[4799]: I0121 18:15:04.065038 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" Jan 21 18:15:04 crc kubenswrapper[4799]: I0121 18:15:04.098562 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbmcj\" (UniqueName: \"kubernetes.io/projected/c23b2345-2c6a-44dc-9cb6-a9a53981011c-kube-api-access-tbmcj\") pod \"c23b2345-2c6a-44dc-9cb6-a9a53981011c\" (UID: \"c23b2345-2c6a-44dc-9cb6-a9a53981011c\") " Jan 21 18:15:04 crc kubenswrapper[4799]: I0121 18:15:04.098907 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c23b2345-2c6a-44dc-9cb6-a9a53981011c-secret-volume\") pod \"c23b2345-2c6a-44dc-9cb6-a9a53981011c\" (UID: \"c23b2345-2c6a-44dc-9cb6-a9a53981011c\") " Jan 21 18:15:04 crc kubenswrapper[4799]: I0121 18:15:04.099022 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c23b2345-2c6a-44dc-9cb6-a9a53981011c-config-volume\") pod \"c23b2345-2c6a-44dc-9cb6-a9a53981011c\" (UID: \"c23b2345-2c6a-44dc-9cb6-a9a53981011c\") " Jan 21 18:15:04 crc kubenswrapper[4799]: I0121 18:15:04.101747 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c23b2345-2c6a-44dc-9cb6-a9a53981011c-config-volume" (OuterVolumeSpecName: "config-volume") pod "c23b2345-2c6a-44dc-9cb6-a9a53981011c" (UID: "c23b2345-2c6a-44dc-9cb6-a9a53981011c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:15:04 crc kubenswrapper[4799]: I0121 18:15:04.108203 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c23b2345-2c6a-44dc-9cb6-a9a53981011c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c23b2345-2c6a-44dc-9cb6-a9a53981011c" (UID: "c23b2345-2c6a-44dc-9cb6-a9a53981011c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:15:04 crc kubenswrapper[4799]: I0121 18:15:04.108543 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c23b2345-2c6a-44dc-9cb6-a9a53981011c-kube-api-access-tbmcj" (OuterVolumeSpecName: "kube-api-access-tbmcj") pod "c23b2345-2c6a-44dc-9cb6-a9a53981011c" (UID: "c23b2345-2c6a-44dc-9cb6-a9a53981011c"). InnerVolumeSpecName "kube-api-access-tbmcj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:15:04 crc kubenswrapper[4799]: I0121 18:15:04.201867 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c23b2345-2c6a-44dc-9cb6-a9a53981011c-config-volume\") on node \"crc\" DevicePath \"\"" Jan 21 18:15:04 crc kubenswrapper[4799]: I0121 18:15:04.201929 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbmcj\" (UniqueName: \"kubernetes.io/projected/c23b2345-2c6a-44dc-9cb6-a9a53981011c-kube-api-access-tbmcj\") on node \"crc\" DevicePath \"\"" Jan 21 18:15:04 crc kubenswrapper[4799]: I0121 18:15:04.201944 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c23b2345-2c6a-44dc-9cb6-a9a53981011c-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 21 18:15:04 crc kubenswrapper[4799]: I0121 18:15:04.626480 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" event={"ID":"c23b2345-2c6a-44dc-9cb6-a9a53981011c","Type":"ContainerDied","Data":"24353e43fc20f25c78331da9156b949f084acfa9565fb3627b61b357ddadb37a"} Jan 21 18:15:04 crc kubenswrapper[4799]: I0121 18:15:04.626872 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24353e43fc20f25c78331da9156b949f084acfa9565fb3627b61b357ddadb37a" Jan 21 18:15:04 crc kubenswrapper[4799]: I0121 18:15:04.626917 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf" Jan 21 18:15:04 crc kubenswrapper[4799]: I0121 18:15:04.705548 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw"] Jan 21 18:15:04 crc kubenswrapper[4799]: I0121 18:15:04.719084 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483610-mgbzw"] Jan 21 18:15:06 crc kubenswrapper[4799]: I0121 18:15:06.221064 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e82402a-bf1e-418b-9ec3-7723300db21b" path="/var/lib/kubelet/pods/9e82402a-bf1e-418b-9ec3-7723300db21b/volumes" Jan 21 18:15:11 crc kubenswrapper[4799]: I0121 18:15:11.205713 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:15:11 crc kubenswrapper[4799]: E0121 18:15:11.206527 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:15:26 crc kubenswrapper[4799]: I0121 18:15:26.204901 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:15:26 crc kubenswrapper[4799]: E0121 18:15:26.206447 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:15:38 crc kubenswrapper[4799]: I0121 18:15:38.205375 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:15:38 crc kubenswrapper[4799]: E0121 18:15:38.205970 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:15:39 crc kubenswrapper[4799]: I0121 18:15:39.952287 4799 scope.go:117] "RemoveContainer" containerID="0cf76a27aa3c02482e1617a07e38ce83eb19ab44881e39186665de44ba8a0023" Jan 21 18:15:49 crc kubenswrapper[4799]: I0121 18:15:49.208898 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:15:49 crc kubenswrapper[4799]: E0121 18:15:49.210198 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:16:02 crc kubenswrapper[4799]: I0121 18:16:02.205765 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:16:02 crc kubenswrapper[4799]: E0121 18:16:02.206723 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:16:14 crc kubenswrapper[4799]: I0121 18:16:14.212774 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:16:14 crc kubenswrapper[4799]: E0121 18:16:14.213963 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:16:26 crc kubenswrapper[4799]: I0121 18:16:26.205259 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:16:26 crc kubenswrapper[4799]: E0121 18:16:26.206550 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:16:38 crc kubenswrapper[4799]: I0121 18:16:38.205694 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:16:38 crc kubenswrapper[4799]: E0121 18:16:38.206926 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:16:51 crc kubenswrapper[4799]: I0121 18:16:51.205834 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:16:51 crc kubenswrapper[4799]: E0121 18:16:51.206852 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:17:01 crc kubenswrapper[4799]: I0121 18:17:01.945359 4799 generic.go:334] "Generic (PLEG): container finished" podID="9f3a320d-5663-4a3d-adb0-fa58906a7a91" containerID="0260fa9e12c45d22a9d3ba8e46cc2ac63374e3243fd651b414485fcec7c66390" exitCode=0 Jan 21 18:17:01 crc kubenswrapper[4799]: I0121 18:17:01.945924 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" event={"ID":"9f3a320d-5663-4a3d-adb0-fa58906a7a91","Type":"ContainerDied","Data":"0260fa9e12c45d22a9d3ba8e46cc2ac63374e3243fd651b414485fcec7c66390"} Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.452091 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.467992 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-extra-config-0\") pod \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.468070 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-migration-ssh-key-1\") pod \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.468150 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-inventory\") pod \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.468229 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-migration-ssh-key-0\") pod \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.468264 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-cell1-compute-config-1\") pod \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.468314 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-combined-ca-bundle\") pod \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.468458 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gw9vh\" (UniqueName: \"kubernetes.io/projected/9f3a320d-5663-4a3d-adb0-fa58906a7a91-kube-api-access-gw9vh\") pod \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.468503 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-ssh-key-openstack-edpm-ipam\") pod \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.468616 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-cell1-compute-config-0\") pod \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\" (UID: \"9f3a320d-5663-4a3d-adb0-fa58906a7a91\") " Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.518738 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/9f3a320d-5663-4a3d-adb0-fa58906a7a91-kube-api-access-gw9vh" (OuterVolumeSpecName: "kube-api-access-gw9vh") pod "9f3a320d-5663-4a3d-adb0-fa58906a7a91" (UID: "9f3a320d-5663-4a3d-adb0-fa58906a7a91"). InnerVolumeSpecName "kube-api-access-gw9vh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.520904 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "9f3a320d-5663-4a3d-adb0-fa58906a7a91" (UID: "9f3a320d-5663-4a3d-adb0-fa58906a7a91"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.521313 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-inventory" (OuterVolumeSpecName: "inventory") pod "9f3a320d-5663-4a3d-adb0-fa58906a7a91" (UID: "9f3a320d-5663-4a3d-adb0-fa58906a7a91"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.521567 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "9f3a320d-5663-4a3d-adb0-fa58906a7a91" (UID: "9f3a320d-5663-4a3d-adb0-fa58906a7a91"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.522379 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "9f3a320d-5663-4a3d-adb0-fa58906a7a91" (UID: "9f3a320d-5663-4a3d-adb0-fa58906a7a91"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.525379 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "9f3a320d-5663-4a3d-adb0-fa58906a7a91" (UID: "9f3a320d-5663-4a3d-adb0-fa58906a7a91"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.526477 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "9f3a320d-5663-4a3d-adb0-fa58906a7a91" (UID: "9f3a320d-5663-4a3d-adb0-fa58906a7a91"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.530491 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "9f3a320d-5663-4a3d-adb0-fa58906a7a91" (UID: "9f3a320d-5663-4a3d-adb0-fa58906a7a91"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.538492 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "9f3a320d-5663-4a3d-adb0-fa58906a7a91" (UID: "9f3a320d-5663-4a3d-adb0-fa58906a7a91"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.572295 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-inventory\") on node \"crc\" DevicePath \"\"" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.572341 4799 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.572359 4799 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.572371 4799 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.572383 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gw9vh\" (UniqueName: \"kubernetes.io/projected/9f3a320d-5663-4a3d-adb0-fa58906a7a91-kube-api-access-gw9vh\") on node \"crc\" DevicePath \"\"" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.572394 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.572408 4799 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.572420 4799 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.572430 4799 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/9f3a320d-5663-4a3d-adb0-fa58906a7a91-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.969465 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" event={"ID":"9f3a320d-5663-4a3d-adb0-fa58906a7a91","Type":"ContainerDied","Data":"22bc30ad8411f1df61c52683cb7d06e980906e7e3ca4d056a3fdae6e2ad01175"} Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.969511 4799 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="22bc30ad8411f1df61c52683cb7d06e980906e7e3ca4d056a3fdae6e2ad01175" Jan 21 18:17:03 crc kubenswrapper[4799]: I0121 18:17:03.969545 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-bs6xc" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.086795 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn"] Jan 21 18:17:04 crc kubenswrapper[4799]: E0121 18:17:04.087372 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c23b2345-2c6a-44dc-9cb6-a9a53981011c" containerName="collect-profiles" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.087398 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c23b2345-2c6a-44dc-9cb6-a9a53981011c" containerName="collect-profiles" Jan 21 18:17:04 crc kubenswrapper[4799]: E0121 18:17:04.087450 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f3a320d-5663-4a3d-adb0-fa58906a7a91" containerName="nova-edpm-deployment-openstack-edpm-ipam" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.087459 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f3a320d-5663-4a3d-adb0-fa58906a7a91" containerName="nova-edpm-deployment-openstack-edpm-ipam" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.087707 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c23b2345-2c6a-44dc-9cb6-a9a53981011c" containerName="collect-profiles" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.087753 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f3a320d-5663-4a3d-adb0-fa58906a7a91" containerName="nova-edpm-deployment-openstack-edpm-ipam" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.088672 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.092342 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.092678 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.092870 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.093040 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-p22hr" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.097438 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.104581 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn"] Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.214849 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:17:04 crc kubenswrapper[4799]: E0121 18:17:04.215381 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.289374 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.289466 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.289967 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.290508 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-inventory\") 
pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.290784 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l99f2\" (UniqueName: \"kubernetes.io/projected/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-kube-api-access-l99f2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.291000 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.291092 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.393493 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.393730 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.393833 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l99f2\" (UniqueName: \"kubernetes.io/projected/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-kube-api-access-l99f2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.393891 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.393945 4799 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.394017 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.394041 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.398197 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.398223 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.398543 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.399261 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.411019 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 
18:17:04.412496 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.418253 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l99f2\" (UniqueName: \"kubernetes.io/projected/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-kube-api-access-l99f2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:04 crc kubenswrapper[4799]: I0121 18:17:04.713197 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:17:05 crc kubenswrapper[4799]: I0121 18:17:05.263905 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn"] Jan 21 18:17:05 crc kubenswrapper[4799]: I0121 18:17:05.266741 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:17:05 crc kubenswrapper[4799]: I0121 18:17:05.994588 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" event={"ID":"b5f5c54c-325e-4640-8cb5-5f8ac5c91234","Type":"ContainerStarted","Data":"06018f872981612708747689e2e5aaf8afc2dd28c1a62fe0d22665e9546efe26"} Jan 21 18:17:05 crc kubenswrapper[4799]: I0121 18:17:05.994936 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" event={"ID":"b5f5c54c-325e-4640-8cb5-5f8ac5c91234","Type":"ContainerStarted","Data":"da180848a588d3203216dc003363d2eb9fb3a18e25234f834afc149642dcc227"} Jan 21 18:17:06 crc kubenswrapper[4799]: I0121 18:17:06.037992 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" podStartSLOduration=1.61007878 podStartE2EDuration="2.037833395s" podCreationTimestamp="2026-01-21 18:17:04 +0000 UTC" firstStartedPulling="2026-01-21 18:17:05.266499539 +0000 UTC m=+2651.892789552" lastFinishedPulling="2026-01-21 18:17:05.694254144 +0000 UTC m=+2652.320544167" observedRunningTime="2026-01-21 18:17:06.009610405 +0000 UTC m=+2652.635900428" watchObservedRunningTime="2026-01-21 18:17:06.037833395 +0000 UTC m=+2652.664123418" Jan 21 18:17:15 crc kubenswrapper[4799]: I0121 18:17:15.206268 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:17:15 crc kubenswrapper[4799]: E0121 18:17:15.207267 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:17:26 crc kubenswrapper[4799]: I0121 18:17:26.206396 4799 scope.go:117] "RemoveContainer" 
containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:17:26 crc kubenswrapper[4799]: E0121 18:17:26.207418 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:17:40 crc kubenswrapper[4799]: I0121 18:17:40.205016 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:17:40 crc kubenswrapper[4799]: E0121 18:17:40.205981 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:17:53 crc kubenswrapper[4799]: I0121 18:17:53.145977 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4snkh"] Jan 21 18:17:53 crc kubenswrapper[4799]: I0121 18:17:53.179606 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4snkh"] Jan 21 18:17:53 crc kubenswrapper[4799]: I0121 18:17:53.188318 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:17:53 crc kubenswrapper[4799]: I0121 18:17:53.214482 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:17:53 crc kubenswrapper[4799]: E0121 18:17:53.225627 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:17:53 crc kubenswrapper[4799]: I0121 18:17:53.337705 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-utilities\") pod \"redhat-marketplace-4snkh\" (UID: \"d0b61517-d3bb-44d4-93d6-16b23f6fa47f\") " pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:17:53 crc kubenswrapper[4799]: I0121 18:17:53.338048 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98z7p\" (UniqueName: \"kubernetes.io/projected/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-kube-api-access-98z7p\") pod \"redhat-marketplace-4snkh\" (UID: \"d0b61517-d3bb-44d4-93d6-16b23f6fa47f\") " pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:17:53 crc kubenswrapper[4799]: I0121 18:17:53.338180 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-catalog-content\") 
pod \"redhat-marketplace-4snkh\" (UID: \"d0b61517-d3bb-44d4-93d6-16b23f6fa47f\") " pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:17:53 crc kubenswrapper[4799]: I0121 18:17:53.441493 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-utilities\") pod \"redhat-marketplace-4snkh\" (UID: \"d0b61517-d3bb-44d4-93d6-16b23f6fa47f\") " pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:17:53 crc kubenswrapper[4799]: I0121 18:17:53.441686 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98z7p\" (UniqueName: \"kubernetes.io/projected/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-kube-api-access-98z7p\") pod \"redhat-marketplace-4snkh\" (UID: \"d0b61517-d3bb-44d4-93d6-16b23f6fa47f\") " pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:17:53 crc kubenswrapper[4799]: I0121 18:17:53.441774 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-catalog-content\") pod \"redhat-marketplace-4snkh\" (UID: \"d0b61517-d3bb-44d4-93d6-16b23f6fa47f\") " pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:17:53 crc kubenswrapper[4799]: I0121 18:17:53.442746 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-utilities\") pod \"redhat-marketplace-4snkh\" (UID: \"d0b61517-d3bb-44d4-93d6-16b23f6fa47f\") " pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:17:53 crc kubenswrapper[4799]: I0121 18:17:53.442902 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-catalog-content\") pod \"redhat-marketplace-4snkh\" (UID: \"d0b61517-d3bb-44d4-93d6-16b23f6fa47f\") " pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:17:53 crc kubenswrapper[4799]: I0121 18:17:53.482465 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98z7p\" (UniqueName: \"kubernetes.io/projected/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-kube-api-access-98z7p\") pod \"redhat-marketplace-4snkh\" (UID: \"d0b61517-d3bb-44d4-93d6-16b23f6fa47f\") " pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:17:53 crc kubenswrapper[4799]: I0121 18:17:53.533269 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:17:54 crc kubenswrapper[4799]: I0121 18:17:54.084844 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4snkh"] Jan 21 18:17:54 crc kubenswrapper[4799]: I0121 18:17:54.464931 4799 generic.go:334] "Generic (PLEG): container finished" podID="d0b61517-d3bb-44d4-93d6-16b23f6fa47f" containerID="1e57f4dfb6297a164de57e18cfdd388daa37466c3843c5a64aa5b35347d79619" exitCode=0 Jan 21 18:17:54 crc kubenswrapper[4799]: I0121 18:17:54.464989 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4snkh" event={"ID":"d0b61517-d3bb-44d4-93d6-16b23f6fa47f","Type":"ContainerDied","Data":"1e57f4dfb6297a164de57e18cfdd388daa37466c3843c5a64aa5b35347d79619"} Jan 21 18:17:54 crc kubenswrapper[4799]: I0121 18:17:54.465023 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4snkh" event={"ID":"d0b61517-d3bb-44d4-93d6-16b23f6fa47f","Type":"ContainerStarted","Data":"042a6ae7dbba748bfde83352b2542831c1864ac9bba19aa142bad3506e2ea890"} Jan 21 18:17:56 crc kubenswrapper[4799]: I0121 18:17:56.488024 4799 generic.go:334] "Generic (PLEG): container finished" podID="d0b61517-d3bb-44d4-93d6-16b23f6fa47f" containerID="e42f170cd114e3958a8246f8c9f3771c41aff37895eff689c60f9730157ddb64" exitCode=0 Jan 21 18:17:56 crc kubenswrapper[4799]: I0121 18:17:56.488139 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4snkh" event={"ID":"d0b61517-d3bb-44d4-93d6-16b23f6fa47f","Type":"ContainerDied","Data":"e42f170cd114e3958a8246f8c9f3771c41aff37895eff689c60f9730157ddb64"} Jan 21 18:17:57 crc kubenswrapper[4799]: I0121 18:17:57.500649 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4snkh" event={"ID":"d0b61517-d3bb-44d4-93d6-16b23f6fa47f","Type":"ContainerStarted","Data":"b84792336d5ef844a6b4c77e69284a8b2427f2893a9782e627b46d839a5e6520"} Jan 21 18:17:57 crc kubenswrapper[4799]: I0121 18:17:57.525893 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4snkh" podStartSLOduration=1.909796958 podStartE2EDuration="4.525870892s" podCreationTimestamp="2026-01-21 18:17:53 +0000 UTC" firstStartedPulling="2026-01-21 18:17:54.469310037 +0000 UTC m=+2701.095600060" lastFinishedPulling="2026-01-21 18:17:57.085383971 +0000 UTC m=+2703.711673994" observedRunningTime="2026-01-21 18:17:57.518054664 +0000 UTC m=+2704.144344697" watchObservedRunningTime="2026-01-21 18:17:57.525870892 +0000 UTC m=+2704.152160915" Jan 21 18:18:03 crc kubenswrapper[4799]: I0121 18:18:03.511481 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-c55l8"] Jan 21 18:18:03 crc kubenswrapper[4799]: I0121 18:18:03.514383 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:03 crc kubenswrapper[4799]: I0121 18:18:03.526957 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c55l8"] Jan 21 18:18:03 crc kubenswrapper[4799]: I0121 18:18:03.535194 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:18:03 crc kubenswrapper[4799]: I0121 18:18:03.535528 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:18:03 crc kubenswrapper[4799]: I0121 18:18:03.596724 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:18:03 crc kubenswrapper[4799]: I0121 18:18:03.646692 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2ltf\" (UniqueName: \"kubernetes.io/projected/ce39fa9a-30c5-4236-807e-be93cdb96eff-kube-api-access-b2ltf\") pod \"redhat-operators-c55l8\" (UID: \"ce39fa9a-30c5-4236-807e-be93cdb96eff\") " pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:03 crc kubenswrapper[4799]: I0121 18:18:03.646831 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce39fa9a-30c5-4236-807e-be93cdb96eff-utilities\") pod \"redhat-operators-c55l8\" (UID: \"ce39fa9a-30c5-4236-807e-be93cdb96eff\") " pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:03 crc kubenswrapper[4799]: I0121 18:18:03.646912 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce39fa9a-30c5-4236-807e-be93cdb96eff-catalog-content\") pod \"redhat-operators-c55l8\" (UID: \"ce39fa9a-30c5-4236-807e-be93cdb96eff\") " pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:03 crc kubenswrapper[4799]: I0121 18:18:03.748605 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce39fa9a-30c5-4236-807e-be93cdb96eff-catalog-content\") pod \"redhat-operators-c55l8\" (UID: \"ce39fa9a-30c5-4236-807e-be93cdb96eff\") " pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:03 crc kubenswrapper[4799]: I0121 18:18:03.748808 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2ltf\" (UniqueName: \"kubernetes.io/projected/ce39fa9a-30c5-4236-807e-be93cdb96eff-kube-api-access-b2ltf\") pod \"redhat-operators-c55l8\" (UID: \"ce39fa9a-30c5-4236-807e-be93cdb96eff\") " pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:03 crc kubenswrapper[4799]: I0121 18:18:03.748908 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce39fa9a-30c5-4236-807e-be93cdb96eff-utilities\") pod \"redhat-operators-c55l8\" (UID: \"ce39fa9a-30c5-4236-807e-be93cdb96eff\") " pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:03 crc kubenswrapper[4799]: I0121 18:18:03.749486 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce39fa9a-30c5-4236-807e-be93cdb96eff-catalog-content\") pod \"redhat-operators-c55l8\" (UID: \"ce39fa9a-30c5-4236-807e-be93cdb96eff\") " 
pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:03 crc kubenswrapper[4799]: I0121 18:18:03.749499 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce39fa9a-30c5-4236-807e-be93cdb96eff-utilities\") pod \"redhat-operators-c55l8\" (UID: \"ce39fa9a-30c5-4236-807e-be93cdb96eff\") " pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:03 crc kubenswrapper[4799]: I0121 18:18:03.775237 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2ltf\" (UniqueName: \"kubernetes.io/projected/ce39fa9a-30c5-4236-807e-be93cdb96eff-kube-api-access-b2ltf\") pod \"redhat-operators-c55l8\" (UID: \"ce39fa9a-30c5-4236-807e-be93cdb96eff\") " pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:03 crc kubenswrapper[4799]: I0121 18:18:03.836762 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:04 crc kubenswrapper[4799]: I0121 18:18:04.326341 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c55l8"] Jan 21 18:18:04 crc kubenswrapper[4799]: I0121 18:18:04.634711 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c55l8" event={"ID":"ce39fa9a-30c5-4236-807e-be93cdb96eff","Type":"ContainerStarted","Data":"9f0df0048e1601d098831cfbbf2cbc51ff4c3a6fbe24f719ffc8149a685b0e04"} Jan 21 18:18:04 crc kubenswrapper[4799]: I0121 18:18:04.698085 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:18:05 crc kubenswrapper[4799]: I0121 18:18:05.647823 4799 generic.go:334] "Generic (PLEG): container finished" podID="ce39fa9a-30c5-4236-807e-be93cdb96eff" containerID="ce9a8b288c1b0e45e78726fc5c7cfdaae190200ad907cf192ce3601511450a8f" exitCode=0 Jan 21 18:18:05 crc kubenswrapper[4799]: I0121 18:18:05.647905 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c55l8" event={"ID":"ce39fa9a-30c5-4236-807e-be93cdb96eff","Type":"ContainerDied","Data":"ce9a8b288c1b0e45e78726fc5c7cfdaae190200ad907cf192ce3601511450a8f"} Jan 21 18:18:05 crc kubenswrapper[4799]: I0121 18:18:05.890338 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4snkh"] Jan 21 18:18:06 crc kubenswrapper[4799]: I0121 18:18:06.671237 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c55l8" event={"ID":"ce39fa9a-30c5-4236-807e-be93cdb96eff","Type":"ContainerStarted","Data":"ba9ed178f15f95b4308bad08f363f7d08f2e026a6dc05d488ddabee323d2f562"} Jan 21 18:18:06 crc kubenswrapper[4799]: I0121 18:18:06.671536 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4snkh" podUID="d0b61517-d3bb-44d4-93d6-16b23f6fa47f" containerName="registry-server" containerID="cri-o://b84792336d5ef844a6b4c77e69284a8b2427f2893a9782e627b46d839a5e6520" gracePeriod=2 Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.205659 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:18:07 crc kubenswrapper[4799]: E0121 18:18:07.206337 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.348354 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.540253 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-catalog-content\") pod \"d0b61517-d3bb-44d4-93d6-16b23f6fa47f\" (UID: \"d0b61517-d3bb-44d4-93d6-16b23f6fa47f\") " Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.540600 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-utilities\") pod \"d0b61517-d3bb-44d4-93d6-16b23f6fa47f\" (UID: \"d0b61517-d3bb-44d4-93d6-16b23f6fa47f\") " Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.540648 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98z7p\" (UniqueName: \"kubernetes.io/projected/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-kube-api-access-98z7p\") pod \"d0b61517-d3bb-44d4-93d6-16b23f6fa47f\" (UID: \"d0b61517-d3bb-44d4-93d6-16b23f6fa47f\") " Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.541557 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-utilities" (OuterVolumeSpecName: "utilities") pod "d0b61517-d3bb-44d4-93d6-16b23f6fa47f" (UID: "d0b61517-d3bb-44d4-93d6-16b23f6fa47f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.561666 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-kube-api-access-98z7p" (OuterVolumeSpecName: "kube-api-access-98z7p") pod "d0b61517-d3bb-44d4-93d6-16b23f6fa47f" (UID: "d0b61517-d3bb-44d4-93d6-16b23f6fa47f"). InnerVolumeSpecName "kube-api-access-98z7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.563508 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d0b61517-d3bb-44d4-93d6-16b23f6fa47f" (UID: "d0b61517-d3bb-44d4-93d6-16b23f6fa47f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.644026 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.644098 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.644111 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98z7p\" (UniqueName: \"kubernetes.io/projected/d0b61517-d3bb-44d4-93d6-16b23f6fa47f-kube-api-access-98z7p\") on node \"crc\" DevicePath \"\"" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.685909 4799 generic.go:334] "Generic (PLEG): container finished" podID="d0b61517-d3bb-44d4-93d6-16b23f6fa47f" containerID="b84792336d5ef844a6b4c77e69284a8b2427f2893a9782e627b46d839a5e6520" exitCode=0 Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.685990 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4snkh" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.686004 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4snkh" event={"ID":"d0b61517-d3bb-44d4-93d6-16b23f6fa47f","Type":"ContainerDied","Data":"b84792336d5ef844a6b4c77e69284a8b2427f2893a9782e627b46d839a5e6520"} Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.686087 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4snkh" event={"ID":"d0b61517-d3bb-44d4-93d6-16b23f6fa47f","Type":"ContainerDied","Data":"042a6ae7dbba748bfde83352b2542831c1864ac9bba19aa142bad3506e2ea890"} Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.686113 4799 scope.go:117] "RemoveContainer" containerID="b84792336d5ef844a6b4c77e69284a8b2427f2893a9782e627b46d839a5e6520" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.728417 4799 scope.go:117] "RemoveContainer" containerID="e42f170cd114e3958a8246f8c9f3771c41aff37895eff689c60f9730157ddb64" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.733849 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4snkh"] Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.762119 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4snkh"] Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.767564 4799 scope.go:117] "RemoveContainer" containerID="1e57f4dfb6297a164de57e18cfdd388daa37466c3843c5a64aa5b35347d79619" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.837957 4799 scope.go:117] "RemoveContainer" containerID="b84792336d5ef844a6b4c77e69284a8b2427f2893a9782e627b46d839a5e6520" Jan 21 18:18:07 crc kubenswrapper[4799]: E0121 18:18:07.839326 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b84792336d5ef844a6b4c77e69284a8b2427f2893a9782e627b46d839a5e6520\": container with ID starting with b84792336d5ef844a6b4c77e69284a8b2427f2893a9782e627b46d839a5e6520 not found: ID does not exist" containerID="b84792336d5ef844a6b4c77e69284a8b2427f2893a9782e627b46d839a5e6520" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.839915 4799 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b84792336d5ef844a6b4c77e69284a8b2427f2893a9782e627b46d839a5e6520"} err="failed to get container status \"b84792336d5ef844a6b4c77e69284a8b2427f2893a9782e627b46d839a5e6520\": rpc error: code = NotFound desc = could not find container \"b84792336d5ef844a6b4c77e69284a8b2427f2893a9782e627b46d839a5e6520\": container with ID starting with b84792336d5ef844a6b4c77e69284a8b2427f2893a9782e627b46d839a5e6520 not found: ID does not exist" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.840114 4799 scope.go:117] "RemoveContainer" containerID="e42f170cd114e3958a8246f8c9f3771c41aff37895eff689c60f9730157ddb64" Jan 21 18:18:07 crc kubenswrapper[4799]: E0121 18:18:07.840966 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e42f170cd114e3958a8246f8c9f3771c41aff37895eff689c60f9730157ddb64\": container with ID starting with e42f170cd114e3958a8246f8c9f3771c41aff37895eff689c60f9730157ddb64 not found: ID does not exist" containerID="e42f170cd114e3958a8246f8c9f3771c41aff37895eff689c60f9730157ddb64" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.841021 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e42f170cd114e3958a8246f8c9f3771c41aff37895eff689c60f9730157ddb64"} err="failed to get container status \"e42f170cd114e3958a8246f8c9f3771c41aff37895eff689c60f9730157ddb64\": rpc error: code = NotFound desc = could not find container \"e42f170cd114e3958a8246f8c9f3771c41aff37895eff689c60f9730157ddb64\": container with ID starting with e42f170cd114e3958a8246f8c9f3771c41aff37895eff689c60f9730157ddb64 not found: ID does not exist" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.841064 4799 scope.go:117] "RemoveContainer" containerID="1e57f4dfb6297a164de57e18cfdd388daa37466c3843c5a64aa5b35347d79619" Jan 21 18:18:07 crc kubenswrapper[4799]: E0121 18:18:07.841573 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e57f4dfb6297a164de57e18cfdd388daa37466c3843c5a64aa5b35347d79619\": container with ID starting with 1e57f4dfb6297a164de57e18cfdd388daa37466c3843c5a64aa5b35347d79619 not found: ID does not exist" containerID="1e57f4dfb6297a164de57e18cfdd388daa37466c3843c5a64aa5b35347d79619" Jan 21 18:18:07 crc kubenswrapper[4799]: I0121 18:18:07.841687 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e57f4dfb6297a164de57e18cfdd388daa37466c3843c5a64aa5b35347d79619"} err="failed to get container status \"1e57f4dfb6297a164de57e18cfdd388daa37466c3843c5a64aa5b35347d79619\": rpc error: code = NotFound desc = could not find container \"1e57f4dfb6297a164de57e18cfdd388daa37466c3843c5a64aa5b35347d79619\": container with ID starting with 1e57f4dfb6297a164de57e18cfdd388daa37466c3843c5a64aa5b35347d79619 not found: ID does not exist" Jan 21 18:18:08 crc kubenswrapper[4799]: I0121 18:18:08.216942 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0b61517-d3bb-44d4-93d6-16b23f6fa47f" path="/var/lib/kubelet/pods/d0b61517-d3bb-44d4-93d6-16b23f6fa47f/volumes" Jan 21 18:18:09 crc kubenswrapper[4799]: I0121 18:18:09.719567 4799 generic.go:334] "Generic (PLEG): container finished" podID="ce39fa9a-30c5-4236-807e-be93cdb96eff" containerID="ba9ed178f15f95b4308bad08f363f7d08f2e026a6dc05d488ddabee323d2f562" exitCode=0 Jan 21 18:18:09 crc kubenswrapper[4799]: I0121 
18:18:09.719625 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c55l8" event={"ID":"ce39fa9a-30c5-4236-807e-be93cdb96eff","Type":"ContainerDied","Data":"ba9ed178f15f95b4308bad08f363f7d08f2e026a6dc05d488ddabee323d2f562"} Jan 21 18:18:11 crc kubenswrapper[4799]: I0121 18:18:11.765501 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c55l8" event={"ID":"ce39fa9a-30c5-4236-807e-be93cdb96eff","Type":"ContainerStarted","Data":"bd909b6c1400f42155a5c30a946caf5ec304d22a87a32c5601307cabc30b47a6"} Jan 21 18:18:11 crc kubenswrapper[4799]: I0121 18:18:11.797582 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-c55l8" podStartSLOduration=3.267120233 podStartE2EDuration="8.797555405s" podCreationTimestamp="2026-01-21 18:18:03 +0000 UTC" firstStartedPulling="2026-01-21 18:18:05.650366233 +0000 UTC m=+2712.276656256" lastFinishedPulling="2026-01-21 18:18:11.180801395 +0000 UTC m=+2717.807091428" observedRunningTime="2026-01-21 18:18:11.790547819 +0000 UTC m=+2718.416837862" watchObservedRunningTime="2026-01-21 18:18:11.797555405 +0000 UTC m=+2718.423845428" Jan 21 18:18:13 crc kubenswrapper[4799]: I0121 18:18:13.837532 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:13 crc kubenswrapper[4799]: I0121 18:18:13.838033 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:14 crc kubenswrapper[4799]: I0121 18:18:14.891748 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-c55l8" podUID="ce39fa9a-30c5-4236-807e-be93cdb96eff" containerName="registry-server" probeResult="failure" output=< Jan 21 18:18:14 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Jan 21 18:18:14 crc kubenswrapper[4799]: > Jan 21 18:18:18 crc kubenswrapper[4799]: I0121 18:18:18.207429 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:18:18 crc kubenswrapper[4799]: E0121 18:18:18.208110 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:18:23 crc kubenswrapper[4799]: I0121 18:18:23.889498 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:23 crc kubenswrapper[4799]: I0121 18:18:23.940352 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:24 crc kubenswrapper[4799]: I0121 18:18:24.340845 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c55l8"] Jan 21 18:18:25 crc kubenswrapper[4799]: I0121 18:18:25.919522 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-c55l8" podUID="ce39fa9a-30c5-4236-807e-be93cdb96eff" containerName="registry-server" 
containerID="cri-o://bd909b6c1400f42155a5c30a946caf5ec304d22a87a32c5601307cabc30b47a6" gracePeriod=2 Jan 21 18:18:26 crc kubenswrapper[4799]: I0121 18:18:26.413240 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:26 crc kubenswrapper[4799]: I0121 18:18:26.497562 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce39fa9a-30c5-4236-807e-be93cdb96eff-catalog-content\") pod \"ce39fa9a-30c5-4236-807e-be93cdb96eff\" (UID: \"ce39fa9a-30c5-4236-807e-be93cdb96eff\") " Jan 21 18:18:26 crc kubenswrapper[4799]: I0121 18:18:26.497986 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2ltf\" (UniqueName: \"kubernetes.io/projected/ce39fa9a-30c5-4236-807e-be93cdb96eff-kube-api-access-b2ltf\") pod \"ce39fa9a-30c5-4236-807e-be93cdb96eff\" (UID: \"ce39fa9a-30c5-4236-807e-be93cdb96eff\") " Jan 21 18:18:26 crc kubenswrapper[4799]: I0121 18:18:26.498021 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce39fa9a-30c5-4236-807e-be93cdb96eff-utilities\") pod \"ce39fa9a-30c5-4236-807e-be93cdb96eff\" (UID: \"ce39fa9a-30c5-4236-807e-be93cdb96eff\") " Jan 21 18:18:26 crc kubenswrapper[4799]: I0121 18:18:26.498831 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce39fa9a-30c5-4236-807e-be93cdb96eff-utilities" (OuterVolumeSpecName: "utilities") pod "ce39fa9a-30c5-4236-807e-be93cdb96eff" (UID: "ce39fa9a-30c5-4236-807e-be93cdb96eff"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:18:26 crc kubenswrapper[4799]: I0121 18:18:26.503545 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce39fa9a-30c5-4236-807e-be93cdb96eff-kube-api-access-b2ltf" (OuterVolumeSpecName: "kube-api-access-b2ltf") pod "ce39fa9a-30c5-4236-807e-be93cdb96eff" (UID: "ce39fa9a-30c5-4236-807e-be93cdb96eff"). InnerVolumeSpecName "kube-api-access-b2ltf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:18:26 crc kubenswrapper[4799]: I0121 18:18:26.600507 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2ltf\" (UniqueName: \"kubernetes.io/projected/ce39fa9a-30c5-4236-807e-be93cdb96eff-kube-api-access-b2ltf\") on node \"crc\" DevicePath \"\"" Jan 21 18:18:26 crc kubenswrapper[4799]: I0121 18:18:26.600548 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce39fa9a-30c5-4236-807e-be93cdb96eff-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:18:26 crc kubenswrapper[4799]: I0121 18:18:26.617201 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce39fa9a-30c5-4236-807e-be93cdb96eff-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce39fa9a-30c5-4236-807e-be93cdb96eff" (UID: "ce39fa9a-30c5-4236-807e-be93cdb96eff"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:18:26 crc kubenswrapper[4799]: I0121 18:18:26.702012 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce39fa9a-30c5-4236-807e-be93cdb96eff-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:18:27 crc kubenswrapper[4799]: I0121 18:18:27.029524 4799 generic.go:334] "Generic (PLEG): container finished" podID="ce39fa9a-30c5-4236-807e-be93cdb96eff" containerID="bd909b6c1400f42155a5c30a946caf5ec304d22a87a32c5601307cabc30b47a6" exitCode=0 Jan 21 18:18:27 crc kubenswrapper[4799]: I0121 18:18:27.029589 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c55l8" event={"ID":"ce39fa9a-30c5-4236-807e-be93cdb96eff","Type":"ContainerDied","Data":"bd909b6c1400f42155a5c30a946caf5ec304d22a87a32c5601307cabc30b47a6"} Jan 21 18:18:27 crc kubenswrapper[4799]: I0121 18:18:27.029619 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c55l8" event={"ID":"ce39fa9a-30c5-4236-807e-be93cdb96eff","Type":"ContainerDied","Data":"9f0df0048e1601d098831cfbbf2cbc51ff4c3a6fbe24f719ffc8149a685b0e04"} Jan 21 18:18:27 crc kubenswrapper[4799]: I0121 18:18:27.029638 4799 scope.go:117] "RemoveContainer" containerID="bd909b6c1400f42155a5c30a946caf5ec304d22a87a32c5601307cabc30b47a6" Jan 21 18:18:27 crc kubenswrapper[4799]: I0121 18:18:27.029803 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c55l8" Jan 21 18:18:27 crc kubenswrapper[4799]: I0121 18:18:27.065389 4799 scope.go:117] "RemoveContainer" containerID="ba9ed178f15f95b4308bad08f363f7d08f2e026a6dc05d488ddabee323d2f562" Jan 21 18:18:27 crc kubenswrapper[4799]: I0121 18:18:27.070822 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c55l8"] Jan 21 18:18:27 crc kubenswrapper[4799]: I0121 18:18:27.081974 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-c55l8"] Jan 21 18:18:27 crc kubenswrapper[4799]: I0121 18:18:27.089079 4799 scope.go:117] "RemoveContainer" containerID="ce9a8b288c1b0e45e78726fc5c7cfdaae190200ad907cf192ce3601511450a8f" Jan 21 18:18:27 crc kubenswrapper[4799]: I0121 18:18:27.131621 4799 scope.go:117] "RemoveContainer" containerID="bd909b6c1400f42155a5c30a946caf5ec304d22a87a32c5601307cabc30b47a6" Jan 21 18:18:27 crc kubenswrapper[4799]: E0121 18:18:27.132255 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd909b6c1400f42155a5c30a946caf5ec304d22a87a32c5601307cabc30b47a6\": container with ID starting with bd909b6c1400f42155a5c30a946caf5ec304d22a87a32c5601307cabc30b47a6 not found: ID does not exist" containerID="bd909b6c1400f42155a5c30a946caf5ec304d22a87a32c5601307cabc30b47a6" Jan 21 18:18:27 crc kubenswrapper[4799]: I0121 18:18:27.132329 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd909b6c1400f42155a5c30a946caf5ec304d22a87a32c5601307cabc30b47a6"} err="failed to get container status \"bd909b6c1400f42155a5c30a946caf5ec304d22a87a32c5601307cabc30b47a6\": rpc error: code = NotFound desc = could not find container \"bd909b6c1400f42155a5c30a946caf5ec304d22a87a32c5601307cabc30b47a6\": container with ID starting with bd909b6c1400f42155a5c30a946caf5ec304d22a87a32c5601307cabc30b47a6 not found: ID does not exist" Jan 21 18:18:27 crc 
kubenswrapper[4799]: I0121 18:18:27.132374 4799 scope.go:117] "RemoveContainer" containerID="ba9ed178f15f95b4308bad08f363f7d08f2e026a6dc05d488ddabee323d2f562" Jan 21 18:18:27 crc kubenswrapper[4799]: E0121 18:18:27.132878 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba9ed178f15f95b4308bad08f363f7d08f2e026a6dc05d488ddabee323d2f562\": container with ID starting with ba9ed178f15f95b4308bad08f363f7d08f2e026a6dc05d488ddabee323d2f562 not found: ID does not exist" containerID="ba9ed178f15f95b4308bad08f363f7d08f2e026a6dc05d488ddabee323d2f562" Jan 21 18:18:27 crc kubenswrapper[4799]: I0121 18:18:27.132914 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba9ed178f15f95b4308bad08f363f7d08f2e026a6dc05d488ddabee323d2f562"} err="failed to get container status \"ba9ed178f15f95b4308bad08f363f7d08f2e026a6dc05d488ddabee323d2f562\": rpc error: code = NotFound desc = could not find container \"ba9ed178f15f95b4308bad08f363f7d08f2e026a6dc05d488ddabee323d2f562\": container with ID starting with ba9ed178f15f95b4308bad08f363f7d08f2e026a6dc05d488ddabee323d2f562 not found: ID does not exist" Jan 21 18:18:27 crc kubenswrapper[4799]: I0121 18:18:27.132932 4799 scope.go:117] "RemoveContainer" containerID="ce9a8b288c1b0e45e78726fc5c7cfdaae190200ad907cf192ce3601511450a8f" Jan 21 18:18:27 crc kubenswrapper[4799]: E0121 18:18:27.133231 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce9a8b288c1b0e45e78726fc5c7cfdaae190200ad907cf192ce3601511450a8f\": container with ID starting with ce9a8b288c1b0e45e78726fc5c7cfdaae190200ad907cf192ce3601511450a8f not found: ID does not exist" containerID="ce9a8b288c1b0e45e78726fc5c7cfdaae190200ad907cf192ce3601511450a8f" Jan 21 18:18:27 crc kubenswrapper[4799]: I0121 18:18:27.133267 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce9a8b288c1b0e45e78726fc5c7cfdaae190200ad907cf192ce3601511450a8f"} err="failed to get container status \"ce9a8b288c1b0e45e78726fc5c7cfdaae190200ad907cf192ce3601511450a8f\": rpc error: code = NotFound desc = could not find container \"ce9a8b288c1b0e45e78726fc5c7cfdaae190200ad907cf192ce3601511450a8f\": container with ID starting with ce9a8b288c1b0e45e78726fc5c7cfdaae190200ad907cf192ce3601511450a8f not found: ID does not exist" Jan 21 18:18:28 crc kubenswrapper[4799]: I0121 18:18:28.218026 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce39fa9a-30c5-4236-807e-be93cdb96eff" path="/var/lib/kubelet/pods/ce39fa9a-30c5-4236-807e-be93cdb96eff/volumes" Jan 21 18:18:33 crc kubenswrapper[4799]: I0121 18:18:33.206304 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:18:34 crc kubenswrapper[4799]: I0121 18:18:34.127464 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"faf9697308cda2c1909b38aecfe9cbc2b5b80d0041c2146a3678a3b576db2ee5"} Jan 21 18:19:12 crc kubenswrapper[4799]: I0121 18:19:12.508945 4799 generic.go:334] "Generic (PLEG): container finished" podID="b5f5c54c-325e-4640-8cb5-5f8ac5c91234" containerID="06018f872981612708747689e2e5aaf8afc2dd28c1a62fe0d22665e9546efe26" exitCode=0 Jan 21 18:19:12 crc kubenswrapper[4799]: I0121 18:19:12.509079 4799 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" event={"ID":"b5f5c54c-325e-4640-8cb5-5f8ac5c91234","Type":"ContainerDied","Data":"06018f872981612708747689e2e5aaf8afc2dd28c1a62fe0d22665e9546efe26"} Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.037806 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.238911 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-1\") pod \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.239020 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ssh-key-openstack-edpm-ipam\") pod \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.239045 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-2\") pod \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.239738 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-0\") pod \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.239815 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l99f2\" (UniqueName: \"kubernetes.io/projected/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-kube-api-access-l99f2\") pod \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.239842 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-inventory\") pod \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.239948 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-telemetry-combined-ca-bundle\") pod \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\" (UID: \"b5f5c54c-325e-4640-8cb5-5f8ac5c91234\") " Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.267552 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "b5f5c54c-325e-4640-8cb5-5f8ac5c91234" (UID: "b5f5c54c-325e-4640-8cb5-5f8ac5c91234"). InnerVolumeSpecName "telemetry-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.288929 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-kube-api-access-l99f2" (OuterVolumeSpecName: "kube-api-access-l99f2") pod "b5f5c54c-325e-4640-8cb5-5f8ac5c91234" (UID: "b5f5c54c-325e-4640-8cb5-5f8ac5c91234"). InnerVolumeSpecName "kube-api-access-l99f2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.342638 4799 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.342708 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l99f2\" (UniqueName: \"kubernetes.io/projected/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-kube-api-access-l99f2\") on node \"crc\" DevicePath \"\"" Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.393402 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "b5f5c54c-325e-4640-8cb5-5f8ac5c91234" (UID: "b5f5c54c-325e-4640-8cb5-5f8ac5c91234"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.398010 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "b5f5c54c-325e-4640-8cb5-5f8ac5c91234" (UID: "b5f5c54c-325e-4640-8cb5-5f8ac5c91234"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.400269 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-inventory" (OuterVolumeSpecName: "inventory") pod "b5f5c54c-325e-4640-8cb5-5f8ac5c91234" (UID: "b5f5c54c-325e-4640-8cb5-5f8ac5c91234"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.402285 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "b5f5c54c-325e-4640-8cb5-5f8ac5c91234" (UID: "b5f5c54c-325e-4640-8cb5-5f8ac5c91234"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.407790 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "b5f5c54c-325e-4640-8cb5-5f8ac5c91234" (UID: "b5f5c54c-325e-4640-8cb5-5f8ac5c91234"). InnerVolumeSpecName "ceilometer-compute-config-data-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.445773 4799 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.445810 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.445820 4799 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.445833 4799 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.445844 4799 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5f5c54c-325e-4640-8cb5-5f8ac5c91234-inventory\") on node \"crc\" DevicePath \"\"" Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.529656 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" event={"ID":"b5f5c54c-325e-4640-8cb5-5f8ac5c91234","Type":"ContainerDied","Data":"da180848a588d3203216dc003363d2eb9fb3a18e25234f834afc149642dcc227"} Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.529712 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da180848a588d3203216dc003363d2eb9fb3a18e25234f834afc149642dcc227" Jan 21 18:19:14 crc kubenswrapper[4799]: I0121 18:19:14.529711 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn" Jan 21 18:19:14 crc kubenswrapper[4799]: E0121 18:19:14.725094 4799 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5f5c54c_325e_4640_8cb5_5f8ac5c91234.slice/crio-da180848a588d3203216dc003363d2eb9fb3a18e25234f834afc149642dcc227\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5f5c54c_325e_4640_8cb5_5f8ac5c91234.slice\": RecentStats: unable to find data in memory cache]" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.716033 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Jan 21 18:19:54 crc kubenswrapper[4799]: E0121 18:19:54.717196 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0b61517-d3bb-44d4-93d6-16b23f6fa47f" containerName="extract-content" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.717217 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0b61517-d3bb-44d4-93d6-16b23f6fa47f" containerName="extract-content" Jan 21 18:19:54 crc kubenswrapper[4799]: E0121 18:19:54.717227 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce39fa9a-30c5-4236-807e-be93cdb96eff" containerName="extract-utilities" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.717235 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce39fa9a-30c5-4236-807e-be93cdb96eff" containerName="extract-utilities" Jan 21 18:19:54 crc kubenswrapper[4799]: E0121 18:19:54.717247 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0b61517-d3bb-44d4-93d6-16b23f6fa47f" containerName="registry-server" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.717254 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0b61517-d3bb-44d4-93d6-16b23f6fa47f" containerName="registry-server" Jan 21 18:19:54 crc kubenswrapper[4799]: E0121 18:19:54.717297 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5f5c54c-325e-4640-8cb5-5f8ac5c91234" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.717307 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5f5c54c-325e-4640-8cb5-5f8ac5c91234" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Jan 21 18:19:54 crc kubenswrapper[4799]: E0121 18:19:54.717325 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0b61517-d3bb-44d4-93d6-16b23f6fa47f" containerName="extract-utilities" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.717332 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0b61517-d3bb-44d4-93d6-16b23f6fa47f" containerName="extract-utilities" Jan 21 18:19:54 crc kubenswrapper[4799]: E0121 18:19:54.717346 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce39fa9a-30c5-4236-807e-be93cdb96eff" containerName="extract-content" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.717353 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce39fa9a-30c5-4236-807e-be93cdb96eff" containerName="extract-content" Jan 21 18:19:54 crc kubenswrapper[4799]: E0121 18:19:54.717366 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce39fa9a-30c5-4236-807e-be93cdb96eff" containerName="registry-server" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.717374 4799 
state_mem.go:107] "Deleted CPUSet assignment" podUID="ce39fa9a-30c5-4236-807e-be93cdb96eff" containerName="registry-server" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.717623 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5f5c54c-325e-4640-8cb5-5f8ac5c91234" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.717658 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce39fa9a-30c5-4236-807e-be93cdb96eff" containerName="registry-server" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.717672 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0b61517-d3bb-44d4-93d6-16b23f6fa47f" containerName="registry-server" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.719252 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.722957 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.753513 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.796386 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvzh6\" (UniqueName: \"kubernetes.io/projected/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-kube-api-access-dvzh6\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.796681 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-dev\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.796825 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-lib-modules\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.796951 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.797046 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.797150 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 
18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.797247 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.797342 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-config-data-custom\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.797426 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-etc-nvme\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.797505 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.797618 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-config-data\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.797779 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-scripts\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.797900 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-run\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.797998 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.798082 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-sys\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.805348 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-nfs-0"] Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 
18:19:54.807383 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.809778 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-nfs-config-data" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.816460 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-nfs-0"] Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.882443 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-nfs-2-0"] Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.885275 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.889507 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-nfs-2-config-data" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.892877 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-nfs-2-0"] Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901190 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-lib-modules\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901251 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-var-lib-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901290 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-var-locks-brick\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901319 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-lib-modules\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901326 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901493 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901552 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901577 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-run\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901601 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-etc-iscsi\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901628 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901646 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-config-data-custom\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901672 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-etc-nvme\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901701 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-var-locks-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901723 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901744 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-dev\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901768 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-combined-ca-bundle\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:54 
crc kubenswrapper[4799]: I0121 18:19:54.901844 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-lib-modules\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901866 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901888 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-sys\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901919 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-config-data\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901934 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.901964 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-etc-nvme\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.902016 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.902150 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.902331 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-scripts\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.902366 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-scripts\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " 
pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.902392 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-run\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.902458 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.902489 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-sys\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.902529 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzh2j\" (UniqueName: \"kubernetes.io/projected/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-kube-api-access-kzh2j\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.902562 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-config-data-custom\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.902612 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-etc-nvme\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.902643 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvzh6\" (UniqueName: \"kubernetes.io/projected/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-kube-api-access-dvzh6\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.902668 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-dev\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.902695 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-config-data\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.902727 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-etc-machine-id\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.903083 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-run\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.903274 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.903399 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-sys\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.903789 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-dev\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.910049 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-config-data-custom\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.911116 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-config-data\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.911594 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-scripts\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.927311 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvzh6\" (UniqueName: \"kubernetes.io/projected/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-kube-api-access-dvzh6\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:54 crc kubenswrapper[4799]: I0121 18:19:54.933483 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55c0bc68-cae8-4eee-9caf-37f8a26c76f9-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"55c0bc68-cae8-4eee-9caf-37f8a26c76f9\") " pod="openstack/cinder-backup-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.004998 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: 
\"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-var-locks-brick\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005081 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-run\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005099 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-etc-iscsi\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005138 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-var-locks-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005158 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-dev\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005172 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-combined-ca-bundle\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005198 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-var-lib-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005228 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-lib-modules\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005251 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-var-locks-brick\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005271 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-etc-machine-id\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" 
Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005291 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-sys\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005338 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gzgs\" (UniqueName: \"kubernetes.io/projected/82503b8b-9773-4e14-9703-663675725aa9-kube-api-access-7gzgs\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005369 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/82503b8b-9773-4e14-9703-663675725aa9-config-data-custom\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005388 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-lib-modules\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005412 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-scripts\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005426 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-etc-nvme\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005466 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-etc-iscsi\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005495 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzh2j\" (UniqueName: \"kubernetes.io/projected/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-kube-api-access-kzh2j\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005513 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-config-data-custom\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005552 4799 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-etc-nvme\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005567 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82503b8b-9773-4e14-9703-663675725aa9-config-data\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005584 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-run\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005609 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82503b8b-9773-4e14-9703-663675725aa9-combined-ca-bundle\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005641 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-var-locks-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005665 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-config-data\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005687 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-etc-machine-id\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005710 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-dev\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005726 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-sys\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005751 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-var-lib-cinder\") pod \"cinder-volume-nfs-0\" 
(UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005777 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82503b8b-9773-4e14-9703-663675725aa9-scripts\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.005906 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-var-locks-brick\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.006563 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-lib-modules\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.006644 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-run\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.006679 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-etc-iscsi\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.006740 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-var-locks-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.006772 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-dev\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.007036 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-etc-nvme\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.007203 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-sys\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.009140 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-etc-machine-id\") pod 
\"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.009240 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-var-lib-cinder\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.011814 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-config-data-custom\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.013411 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-scripts\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.015036 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-combined-ca-bundle\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.022733 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzh2j\" (UniqueName: \"kubernetes.io/projected/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-kube-api-access-kzh2j\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.025474 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0658bd3c-d1f4-486d-957a-38f4eb9ccc10-config-data\") pod \"cinder-volume-nfs-0\" (UID: \"0658bd3c-d1f4-486d-957a-38f4eb9ccc10\") " pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.043513 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.108895 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gzgs\" (UniqueName: \"kubernetes.io/projected/82503b8b-9773-4e14-9703-663675725aa9-kube-api-access-7gzgs\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.109005 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/82503b8b-9773-4e14-9703-663675725aa9-config-data-custom\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.109030 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-lib-modules\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.109147 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-etc-nvme\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.109184 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-etc-iscsi\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.109320 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82503b8b-9773-4e14-9703-663675725aa9-config-data\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.109340 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-run\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.109530 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-lib-modules\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.109579 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-run\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.109548 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: 
\"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-etc-nvme\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.109586 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-etc-iscsi\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.109362 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82503b8b-9773-4e14-9703-663675725aa9-combined-ca-bundle\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.110149 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-var-locks-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.110191 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-dev\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.110268 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-var-locks-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.110310 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-sys\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.110328 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-dev\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.110339 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82503b8b-9773-4e14-9703-663675725aa9-scripts\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.110363 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-sys\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.110798 4799 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-var-lib-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.110836 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-var-locks-brick\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.110873 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-etc-machine-id\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.110996 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-etc-machine-id\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.111070 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-var-lib-cinder\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.111158 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/82503b8b-9773-4e14-9703-663675725aa9-var-locks-brick\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.114943 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82503b8b-9773-4e14-9703-663675725aa9-combined-ca-bundle\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.117881 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82503b8b-9773-4e14-9703-663675725aa9-config-data\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.123533 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/82503b8b-9773-4e14-9703-663675725aa9-config-data-custom\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.123952 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82503b8b-9773-4e14-9703-663675725aa9-scripts\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " 
pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.131888 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gzgs\" (UniqueName: \"kubernetes.io/projected/82503b8b-9773-4e14-9703-663675725aa9-kube-api-access-7gzgs\") pod \"cinder-volume-nfs-2-0\" (UID: \"82503b8b-9773-4e14-9703-663675725aa9\") " pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.132784 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-nfs-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.215342 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.939925 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Jan 21 18:19:55 crc kubenswrapper[4799]: I0121 18:19:55.976784 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"55c0bc68-cae8-4eee-9caf-37f8a26c76f9","Type":"ContainerStarted","Data":"f303eb355311c7b4a1d5e15be394e5a54c0260fa1e57e117eb3e05cdbc74945d"} Jan 21 18:19:56 crc kubenswrapper[4799]: I0121 18:19:56.216389 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-nfs-2-0"] Jan 21 18:19:56 crc kubenswrapper[4799]: W0121 18:19:56.230678 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod82503b8b_9773_4e14_9703_663675725aa9.slice/crio-6e41fc2c38019973cd75eb14df81bce673c08238e73bed5d07d85ebef3bef462 WatchSource:0}: Error finding container 6e41fc2c38019973cd75eb14df81bce673c08238e73bed5d07d85ebef3bef462: Status 404 returned error can't find the container with id 6e41fc2c38019973cd75eb14df81bce673c08238e73bed5d07d85ebef3bef462 Jan 21 18:19:56 crc kubenswrapper[4799]: I0121 18:19:56.987353 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-2-0" event={"ID":"82503b8b-9773-4e14-9703-663675725aa9","Type":"ContainerStarted","Data":"d4388792a0242f99cc3577f0a8e4cbaf62af1c34bf5940d25d9e224af563ae24"} Jan 21 18:19:56 crc kubenswrapper[4799]: I0121 18:19:56.988053 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-2-0" event={"ID":"82503b8b-9773-4e14-9703-663675725aa9","Type":"ContainerStarted","Data":"7911b95d29cf08310d6b8a69123682d460aa32e1f59a93905824e2ef63fe0f12"} Jan 21 18:19:56 crc kubenswrapper[4799]: I0121 18:19:56.988064 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-2-0" event={"ID":"82503b8b-9773-4e14-9703-663675725aa9","Type":"ContainerStarted","Data":"6e41fc2c38019973cd75eb14df81bce673c08238e73bed5d07d85ebef3bef462"} Jan 21 18:19:56 crc kubenswrapper[4799]: I0121 18:19:56.990896 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"55c0bc68-cae8-4eee-9caf-37f8a26c76f9","Type":"ContainerStarted","Data":"26186b59ced4b1e04b0d126434769f1b7ff20a2b1ce6eb04f13bfbbaa0891fd7"} Jan 21 18:19:56 crc kubenswrapper[4799]: I0121 18:19:56.990938 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"55c0bc68-cae8-4eee-9caf-37f8a26c76f9","Type":"ContainerStarted","Data":"9a7930ab0ec3e191082271348265653e3bec035170f83cd528dba04f2621728e"} Jan 21 18:19:57 crc kubenswrapper[4799]: I0121 18:19:57.019346 4799 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/cinder-volume-nfs-2-0" podStartSLOduration=2.785057158 podStartE2EDuration="3.01932232s" podCreationTimestamp="2026-01-21 18:19:54 +0000 UTC" firstStartedPulling="2026-01-21 18:19:56.235948668 +0000 UTC m=+2822.862238691" lastFinishedPulling="2026-01-21 18:19:56.47021383 +0000 UTC m=+2823.096503853" observedRunningTime="2026-01-21 18:19:57.013429545 +0000 UTC m=+2823.639719568" watchObservedRunningTime="2026-01-21 18:19:57.01932232 +0000 UTC m=+2823.645612363" Jan 21 18:19:57 crc kubenswrapper[4799]: I0121 18:19:57.050236 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=2.8298650800000003 podStartE2EDuration="3.050212304s" podCreationTimestamp="2026-01-21 18:19:54 +0000 UTC" firstStartedPulling="2026-01-21 18:19:55.933337673 +0000 UTC m=+2822.559627696" lastFinishedPulling="2026-01-21 18:19:56.153684897 +0000 UTC m=+2822.779974920" observedRunningTime="2026-01-21 18:19:57.040814281 +0000 UTC m=+2823.667104314" watchObservedRunningTime="2026-01-21 18:19:57.050212304 +0000 UTC m=+2823.676502327" Jan 21 18:19:57 crc kubenswrapper[4799]: I0121 18:19:57.121312 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-nfs-0"] Jan 21 18:19:58 crc kubenswrapper[4799]: I0121 18:19:58.004113 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-0" event={"ID":"0658bd3c-d1f4-486d-957a-38f4eb9ccc10","Type":"ContainerStarted","Data":"018632b1fb4db43997cba0c6321f513471f0ae67b4cb63ef2edc7123f28ddd85"} Jan 21 18:19:58 crc kubenswrapper[4799]: I0121 18:19:58.005149 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-0" event={"ID":"0658bd3c-d1f4-486d-957a-38f4eb9ccc10","Type":"ContainerStarted","Data":"a04494cd142e479b7d879395a81b78d752887bb0bd4f8e6289962b5221bc18d5"} Jan 21 18:19:58 crc kubenswrapper[4799]: I0121 18:19:58.005174 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-nfs-0" event={"ID":"0658bd3c-d1f4-486d-957a-38f4eb9ccc10","Type":"ContainerStarted","Data":"1266190df1f9532fa9fc2daa88a952264da7669f40c9f17e1a07cbb00be9c187"} Jan 21 18:19:58 crc kubenswrapper[4799]: I0121 18:19:58.038344 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-nfs-0" podStartSLOduration=4.038316662 podStartE2EDuration="4.038316662s" podCreationTimestamp="2026-01-21 18:19:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:19:58.030559745 +0000 UTC m=+2824.656849788" watchObservedRunningTime="2026-01-21 18:19:58.038316662 +0000 UTC m=+2824.664606685" Jan 21 18:20:00 crc kubenswrapper[4799]: I0121 18:20:00.044589 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Jan 21 18:20:00 crc kubenswrapper[4799]: I0121 18:20:00.134006 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-nfs-0" Jan 21 18:20:00 crc kubenswrapper[4799]: I0121 18:20:00.219433 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:20:05 crc kubenswrapper[4799]: I0121 18:20:05.246786 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Jan 21 18:20:05 crc kubenswrapper[4799]: I0121 18:20:05.385496 4799 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openstack/cinder-volume-nfs-0" Jan 21 18:20:05 crc kubenswrapper[4799]: I0121 18:20:05.481980 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-nfs-2-0" Jan 21 18:20:55 crc kubenswrapper[4799]: I0121 18:20:55.970890 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:20:55 crc kubenswrapper[4799]: I0121 18:20:55.971654 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:21:01 crc kubenswrapper[4799]: I0121 18:21:01.150523 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 21 18:21:01 crc kubenswrapper[4799]: I0121 18:21:01.151325 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerName="prometheus" containerID="cri-o://62b90ea328d32659a6d409e4ad624ce3599c941829209ec8c107a40282ee0291" gracePeriod=600 Jan 21 18:21:01 crc kubenswrapper[4799]: I0121 18:21:01.151847 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerName="thanos-sidecar" containerID="cri-o://fa486aa1317a4359b8979a53f7d7df537d219d03994fe2085b132a44ebc30d8f" gracePeriod=600 Jan 21 18:21:01 crc kubenswrapper[4799]: I0121 18:21:01.151895 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerName="config-reloader" containerID="cri-o://6a65adc16182214bd6ea23d9c0f4788f3d3c92e7d8b2f14e3f9aeb765615d6d8" gracePeriod=600 Jan 21 18:21:01 crc kubenswrapper[4799]: I0121 18:21:01.738536 4799 generic.go:334] "Generic (PLEG): container finished" podID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerID="fa486aa1317a4359b8979a53f7d7df537d219d03994fe2085b132a44ebc30d8f" exitCode=0 Jan 21 18:21:01 crc kubenswrapper[4799]: I0121 18:21:01.738850 4799 generic.go:334] "Generic (PLEG): container finished" podID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerID="6a65adc16182214bd6ea23d9c0f4788f3d3c92e7d8b2f14e3f9aeb765615d6d8" exitCode=0 Jan 21 18:21:01 crc kubenswrapper[4799]: I0121 18:21:01.738862 4799 generic.go:334] "Generic (PLEG): container finished" podID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerID="62b90ea328d32659a6d409e4ad624ce3599c941829209ec8c107a40282ee0291" exitCode=0 Jan 21 18:21:01 crc kubenswrapper[4799]: I0121 18:21:01.738642 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb","Type":"ContainerDied","Data":"fa486aa1317a4359b8979a53f7d7df537d219d03994fe2085b132a44ebc30d8f"} Jan 21 18:21:01 crc kubenswrapper[4799]: I0121 18:21:01.738907 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" 
event={"ID":"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb","Type":"ContainerDied","Data":"6a65adc16182214bd6ea23d9c0f4788f3d3c92e7d8b2f14e3f9aeb765615d6d8"} Jan 21 18:21:01 crc kubenswrapper[4799]: I0121 18:21:01.738921 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb","Type":"ContainerDied","Data":"62b90ea328d32659a6d409e4ad624ce3599c941829209ec8c107a40282ee0291"} Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.124882 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.191294 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-config\") pod \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.191378 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-secret-combined-ca-bundle\") pod \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.191432 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-1\") pod \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.191463 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config\") pod \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.191491 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-tls-assets\") pod \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.191527 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7h9pc\" (UniqueName: \"kubernetes.io/projected/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-kube-api-access-7h9pc\") pod \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.192139 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") pod \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.192177 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-0\") pod \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\" (UID: 
\"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.192220 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.192274 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-2\") pod \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.192356 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-thanos-prometheus-http-client-file\") pod \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.192404 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-config-out\") pod \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.192429 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\" (UID: \"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb\") " Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.195220 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-1" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-1") pod "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" (UID: "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.195269 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" (UID: "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.195848 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-2" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-2") pod "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" (UID: "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb"). 
InnerVolumeSpecName "prometheus-metric-storage-rulefiles-2". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.200911 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d") pod "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" (UID: "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb"). InnerVolumeSpecName "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.201498 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-secret-combined-ca-bundle" (OuterVolumeSpecName: "secret-combined-ca-bundle") pod "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" (UID: "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb"). InnerVolumeSpecName "secret-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.203067 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-config-out" (OuterVolumeSpecName: "config-out") pod "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" (UID: "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.203219 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" (UID: "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.203275 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d") pod "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" (UID: "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb"). InnerVolumeSpecName "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.203501 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" (UID: "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.203891 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-kube-api-access-7h9pc" (OuterVolumeSpecName: "kube-api-access-7h9pc") pod "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" (UID: "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb"). InnerVolumeSpecName "kube-api-access-7h9pc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.206456 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-config" (OuterVolumeSpecName: "config") pod "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" (UID: "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.300625 4799 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-config-out\") on node \"crc\" DevicePath \"\"" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.300668 4799 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") on node \"crc\" DevicePath \"\"" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.300684 4799 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-config\") on node \"crc\" DevicePath \"\"" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.300695 4799 reconciler_common.go:293] "Volume detached for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-secret-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.300707 4799 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-1\") on node \"crc\" DevicePath \"\"" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.300718 4799 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-tls-assets\") on node \"crc\" DevicePath \"\"" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.300754 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7h9pc\" (UniqueName: \"kubernetes.io/projected/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-kube-api-access-7h9pc\") on node \"crc\" DevicePath \"\"" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.300770 4799 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.300782 4799 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") on node \"crc\" DevicePath \"\"" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.300798 4799 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-prometheus-metric-storage-rulefiles-2\") on node \"crc\" DevicePath \"\"" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.300811 4799 
reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.332462 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config" (OuterVolumeSpecName: "web-config") pod "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" (UID: "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.402537 4799 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb-web-config\") on node \"crc\" DevicePath \"\"" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.512490 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" (UID: "af6d3ab2-5545-4a0f-b1fa-472cc95d13bb"). InnerVolumeSpecName "pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.606637 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") on node \"crc\" " Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.637115 4799 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.637324 4799 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea") on node "crc" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.709528 4799 reconciler_common.go:293] "Volume detached for volume \"pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") on node \"crc\" DevicePath \"\"" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.751219 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"af6d3ab2-5545-4a0f-b1fa-472cc95d13bb","Type":"ContainerDied","Data":"85f9b7a8eef993d349d7c1381f2017561667ccb8e633fb5a9ce436d0a05ab278"} Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.751289 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.751322 4799 scope.go:117] "RemoveContainer" containerID="fa486aa1317a4359b8979a53f7d7df537d219d03994fe2085b132a44ebc30d8f" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.784639 4799 scope.go:117] "RemoveContainer" containerID="6a65adc16182214bd6ea23d9c0f4788f3d3c92e7d8b2f14e3f9aeb765615d6d8" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.786699 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.805092 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.811002 4799 scope.go:117] "RemoveContainer" containerID="62b90ea328d32659a6d409e4ad624ce3599c941829209ec8c107a40282ee0291" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.838873 4799 scope.go:117] "RemoveContainer" containerID="4d8dd5bbca2bb8a38e11eec588bc659f8b91a05c9d99ded1b5c5f728c4a49beb" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.842862 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 21 18:21:02 crc kubenswrapper[4799]: E0121 18:21:02.843843 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerName="prometheus" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.843870 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerName="prometheus" Jan 21 18:21:02 crc kubenswrapper[4799]: E0121 18:21:02.843885 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerName="init-config-reloader" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.843894 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerName="init-config-reloader" Jan 21 18:21:02 crc kubenswrapper[4799]: E0121 18:21:02.843911 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerName="thanos-sidecar" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.843919 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerName="thanos-sidecar" Jan 21 18:21:02 crc kubenswrapper[4799]: E0121 18:21:02.843973 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerName="config-reloader" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.843981 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerName="config-reloader" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.845225 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerName="config-reloader" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.845255 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerName="thanos-sidecar" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.845276 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" containerName="prometheus" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.848552 4799 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.851702 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.851841 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.851849 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.852014 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-8j9xl" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.875118 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.877486 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.878602 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-2" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.879984 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-1" Jan 21 18:21:02 crc kubenswrapper[4799]: I0121 18:21:02.902242 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.021954 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/5e364896-23a0-4e1e-9e15-7f637ee5326c-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.022073 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/5e364896-23a0-4e1e-9e15-7f637ee5326c-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.022170 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5e364896-23a0-4e1e-9e15-7f637ee5326c-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.022224 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.022266 4799 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.022298 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.022347 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/5e364896-23a0-4e1e-9e15-7f637ee5326c-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.022375 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.022410 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.022477 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmrkb\" (UniqueName: \"kubernetes.io/projected/5e364896-23a0-4e1e-9e15-7f637ee5326c-kube-api-access-pmrkb\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.022502 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5e364896-23a0-4e1e-9e15-7f637ee5326c-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.022546 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-config\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.022572 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.124836 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/5e364896-23a0-4e1e-9e15-7f637ee5326c-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.184413 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.184548 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.184707 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmrkb\" (UniqueName: \"kubernetes.io/projected/5e364896-23a0-4e1e-9e15-7f637ee5326c-kube-api-access-pmrkb\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.184751 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5e364896-23a0-4e1e-9e15-7f637ee5326c-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.184837 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-config\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.184878 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.185003 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/5e364896-23a0-4e1e-9e15-7f637ee5326c-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: 
\"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.185103 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/5e364896-23a0-4e1e-9e15-7f637ee5326c-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.185256 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5e364896-23a0-4e1e-9e15-7f637ee5326c-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.185339 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.185410 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.185451 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.126534 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/5e364896-23a0-4e1e-9e15-7f637ee5326c-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.187870 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/5e364896-23a0-4e1e-9e15-7f637ee5326c-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.188859 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/5e364896-23a0-4e1e-9e15-7f637ee5326c-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: 
I0121 18:21:03.192243 4799 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.192266 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-config\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.192293 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8cc5b1a9ff3eab274f8795ef882996bdac004679de968d37b59819fb3c1cc7c5/globalmount\"" pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.193477 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.195553 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.197208 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.197231 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.200752 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/5e364896-23a0-4e1e-9e15-7f637ee5326c-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.204438 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5e364896-23a0-4e1e-9e15-7f637ee5326c-config-out\") pod \"prometheus-metric-storage-0\" (UID: 
\"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.217834 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmrkb\" (UniqueName: \"kubernetes.io/projected/5e364896-23a0-4e1e-9e15-7f637ee5326c-kube-api-access-pmrkb\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.230592 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5e364896-23a0-4e1e-9e15-7f637ee5326c-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.245572 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4c3bc3d7-3ce4-4911-a7fc-55d073cceaea\") pod \"prometheus-metric-storage-0\" (UID: \"5e364896-23a0-4e1e-9e15-7f637ee5326c\") " pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:03 crc kubenswrapper[4799]: I0121 18:21:03.540767 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:04 crc kubenswrapper[4799]: I0121 18:21:04.023256 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 21 18:21:04 crc kubenswrapper[4799]: I0121 18:21:04.217620 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af6d3ab2-5545-4a0f-b1fa-472cc95d13bb" path="/var/lib/kubelet/pods/af6d3ab2-5545-4a0f-b1fa-472cc95d13bb/volumes" Jan 21 18:21:04 crc kubenswrapper[4799]: I0121 18:21:04.780502 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5e364896-23a0-4e1e-9e15-7f637ee5326c","Type":"ContainerStarted","Data":"d291ce91bf193d490c263b146a2c346e8f48890c034161dd243b93757e77224c"} Jan 21 18:21:07 crc kubenswrapper[4799]: I0121 18:21:07.815344 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5e364896-23a0-4e1e-9e15-7f637ee5326c","Type":"ContainerStarted","Data":"ac1d8e83ff66e44efceddba12cefa3a36bd7617bc7c2b0735e592d8d1ad37dc1"} Jan 21 18:21:12 crc kubenswrapper[4799]: I0121 18:21:12.011678 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hq7cs"] Jan 21 18:21:12 crc kubenswrapper[4799]: I0121 18:21:12.020303 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:12 crc kubenswrapper[4799]: I0121 18:21:12.028996 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hq7cs"] Jan 21 18:21:12 crc kubenswrapper[4799]: I0121 18:21:12.126861 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccd4eac7-fb24-4229-b18e-a3749fb82237-utilities\") pod \"certified-operators-hq7cs\" (UID: \"ccd4eac7-fb24-4229-b18e-a3749fb82237\") " pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:12 crc kubenswrapper[4799]: I0121 18:21:12.126945 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86b5k\" (UniqueName: \"kubernetes.io/projected/ccd4eac7-fb24-4229-b18e-a3749fb82237-kube-api-access-86b5k\") pod \"certified-operators-hq7cs\" (UID: \"ccd4eac7-fb24-4229-b18e-a3749fb82237\") " pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:12 crc kubenswrapper[4799]: I0121 18:21:12.127167 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccd4eac7-fb24-4229-b18e-a3749fb82237-catalog-content\") pod \"certified-operators-hq7cs\" (UID: \"ccd4eac7-fb24-4229-b18e-a3749fb82237\") " pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:12 crc kubenswrapper[4799]: I0121 18:21:12.229748 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccd4eac7-fb24-4229-b18e-a3749fb82237-utilities\") pod \"certified-operators-hq7cs\" (UID: \"ccd4eac7-fb24-4229-b18e-a3749fb82237\") " pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:12 crc kubenswrapper[4799]: I0121 18:21:12.229856 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86b5k\" (UniqueName: \"kubernetes.io/projected/ccd4eac7-fb24-4229-b18e-a3749fb82237-kube-api-access-86b5k\") pod \"certified-operators-hq7cs\" (UID: \"ccd4eac7-fb24-4229-b18e-a3749fb82237\") " pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:12 crc kubenswrapper[4799]: I0121 18:21:12.230040 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccd4eac7-fb24-4229-b18e-a3749fb82237-catalog-content\") pod \"certified-operators-hq7cs\" (UID: \"ccd4eac7-fb24-4229-b18e-a3749fb82237\") " pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:12 crc kubenswrapper[4799]: I0121 18:21:12.230385 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccd4eac7-fb24-4229-b18e-a3749fb82237-utilities\") pod \"certified-operators-hq7cs\" (UID: \"ccd4eac7-fb24-4229-b18e-a3749fb82237\") " pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:12 crc kubenswrapper[4799]: I0121 18:21:12.230583 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccd4eac7-fb24-4229-b18e-a3749fb82237-catalog-content\") pod \"certified-operators-hq7cs\" (UID: \"ccd4eac7-fb24-4229-b18e-a3749fb82237\") " pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:12 crc kubenswrapper[4799]: I0121 18:21:12.260492 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-86b5k\" (UniqueName: \"kubernetes.io/projected/ccd4eac7-fb24-4229-b18e-a3749fb82237-kube-api-access-86b5k\") pod \"certified-operators-hq7cs\" (UID: \"ccd4eac7-fb24-4229-b18e-a3749fb82237\") " pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:12 crc kubenswrapper[4799]: I0121 18:21:12.378690 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:12 crc kubenswrapper[4799]: I0121 18:21:12.920027 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hq7cs"] Jan 21 18:21:13 crc kubenswrapper[4799]: I0121 18:21:13.880639 4799 generic.go:334] "Generic (PLEG): container finished" podID="ccd4eac7-fb24-4229-b18e-a3749fb82237" containerID="d5af8c28832e63d1dba64229d2631f512905635c65d75dde180c9af46ae4b32d" exitCode=0 Jan 21 18:21:13 crc kubenswrapper[4799]: I0121 18:21:13.880727 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hq7cs" event={"ID":"ccd4eac7-fb24-4229-b18e-a3749fb82237","Type":"ContainerDied","Data":"d5af8c28832e63d1dba64229d2631f512905635c65d75dde180c9af46ae4b32d"} Jan 21 18:21:13 crc kubenswrapper[4799]: I0121 18:21:13.881074 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hq7cs" event={"ID":"ccd4eac7-fb24-4229-b18e-a3749fb82237","Type":"ContainerStarted","Data":"8a09c27a1402d28558d991c493e5e757815d24b4d7ffc563b905db1af9c7b3a5"} Jan 21 18:21:14 crc kubenswrapper[4799]: I0121 18:21:14.894306 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hq7cs" event={"ID":"ccd4eac7-fb24-4229-b18e-a3749fb82237","Type":"ContainerStarted","Data":"b8ec2e55e08d2d1c3e76b7560108b06356265a659e92429a7f4f8e95e7458462"} Jan 21 18:21:15 crc kubenswrapper[4799]: I0121 18:21:15.908006 4799 generic.go:334] "Generic (PLEG): container finished" podID="ccd4eac7-fb24-4229-b18e-a3749fb82237" containerID="b8ec2e55e08d2d1c3e76b7560108b06356265a659e92429a7f4f8e95e7458462" exitCode=0 Jan 21 18:21:15 crc kubenswrapper[4799]: I0121 18:21:15.908062 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hq7cs" event={"ID":"ccd4eac7-fb24-4229-b18e-a3749fb82237","Type":"ContainerDied","Data":"b8ec2e55e08d2d1c3e76b7560108b06356265a659e92429a7f4f8e95e7458462"} Jan 21 18:21:16 crc kubenswrapper[4799]: I0121 18:21:16.920868 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hq7cs" event={"ID":"ccd4eac7-fb24-4229-b18e-a3749fb82237","Type":"ContainerStarted","Data":"2734b74bbd30ba72facf2c74e3914189184d28f492375990457b0c44b81ebfb8"} Jan 21 18:21:16 crc kubenswrapper[4799]: I0121 18:21:16.925440 4799 generic.go:334] "Generic (PLEG): container finished" podID="5e364896-23a0-4e1e-9e15-7f637ee5326c" containerID="ac1d8e83ff66e44efceddba12cefa3a36bd7617bc7c2b0735e592d8d1ad37dc1" exitCode=0 Jan 21 18:21:16 crc kubenswrapper[4799]: I0121 18:21:16.925497 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5e364896-23a0-4e1e-9e15-7f637ee5326c","Type":"ContainerDied","Data":"ac1d8e83ff66e44efceddba12cefa3a36bd7617bc7c2b0735e592d8d1ad37dc1"} Jan 21 18:21:16 crc kubenswrapper[4799]: I0121 18:21:16.963294 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hq7cs" 
podStartSLOduration=3.539262763 podStartE2EDuration="5.963277491s" podCreationTimestamp="2026-01-21 18:21:11 +0000 UTC" firstStartedPulling="2026-01-21 18:21:13.885459501 +0000 UTC m=+2900.511749544" lastFinishedPulling="2026-01-21 18:21:16.309474249 +0000 UTC m=+2902.935764272" observedRunningTime="2026-01-21 18:21:16.955625763 +0000 UTC m=+2903.581915786" watchObservedRunningTime="2026-01-21 18:21:16.963277491 +0000 UTC m=+2903.589567514" Jan 21 18:21:17 crc kubenswrapper[4799]: I0121 18:21:17.957221 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5e364896-23a0-4e1e-9e15-7f637ee5326c","Type":"ContainerStarted","Data":"4d576d9651fb5d93173fbac75b289dd1360091a1a69d17db01826cadf9791011"} Jan 21 18:21:21 crc kubenswrapper[4799]: I0121 18:21:21.999063 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5e364896-23a0-4e1e-9e15-7f637ee5326c","Type":"ContainerStarted","Data":"d2a30636d1d8b80109d440724a7d2b78314a253d6f181c7a0cb9ea9077251963"} Jan 21 18:21:21 crc kubenswrapper[4799]: I0121 18:21:21.999656 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"5e364896-23a0-4e1e-9e15-7f637ee5326c","Type":"ContainerStarted","Data":"ec4175294c3520d10ed4098b8dbc1bb88f08a028ce5546f6243ef9be21178862"} Jan 21 18:21:22 crc kubenswrapper[4799]: I0121 18:21:22.029830 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=20.029807742 podStartE2EDuration="20.029807742s" podCreationTimestamp="2026-01-21 18:21:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 18:21:22.028280238 +0000 UTC m=+2908.654570281" watchObservedRunningTime="2026-01-21 18:21:22.029807742 +0000 UTC m=+2908.656097765" Jan 21 18:21:22 crc kubenswrapper[4799]: I0121 18:21:22.378953 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:22 crc kubenswrapper[4799]: I0121 18:21:22.379061 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:22 crc kubenswrapper[4799]: I0121 18:21:22.439467 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:23 crc kubenswrapper[4799]: I0121 18:21:23.082740 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:23 crc kubenswrapper[4799]: I0121 18:21:23.138724 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hq7cs"] Jan 21 18:21:23 crc kubenswrapper[4799]: I0121 18:21:23.554995 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:25 crc kubenswrapper[4799]: I0121 18:21:25.035904 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hq7cs" podUID="ccd4eac7-fb24-4229-b18e-a3749fb82237" containerName="registry-server" containerID="cri-o://2734b74bbd30ba72facf2c74e3914189184d28f492375990457b0c44b81ebfb8" gracePeriod=2 Jan 21 18:21:25 crc kubenswrapper[4799]: I0121 18:21:25.544053 4799 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:25 crc kubenswrapper[4799]: I0121 18:21:25.548455 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccd4eac7-fb24-4229-b18e-a3749fb82237-catalog-content\") pod \"ccd4eac7-fb24-4229-b18e-a3749fb82237\" (UID: \"ccd4eac7-fb24-4229-b18e-a3749fb82237\") " Jan 21 18:21:25 crc kubenswrapper[4799]: I0121 18:21:25.548505 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccd4eac7-fb24-4229-b18e-a3749fb82237-utilities\") pod \"ccd4eac7-fb24-4229-b18e-a3749fb82237\" (UID: \"ccd4eac7-fb24-4229-b18e-a3749fb82237\") " Jan 21 18:21:25 crc kubenswrapper[4799]: I0121 18:21:25.548586 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86b5k\" (UniqueName: \"kubernetes.io/projected/ccd4eac7-fb24-4229-b18e-a3749fb82237-kube-api-access-86b5k\") pod \"ccd4eac7-fb24-4229-b18e-a3749fb82237\" (UID: \"ccd4eac7-fb24-4229-b18e-a3749fb82237\") " Jan 21 18:21:25 crc kubenswrapper[4799]: I0121 18:21:25.551942 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccd4eac7-fb24-4229-b18e-a3749fb82237-utilities" (OuterVolumeSpecName: "utilities") pod "ccd4eac7-fb24-4229-b18e-a3749fb82237" (UID: "ccd4eac7-fb24-4229-b18e-a3749fb82237"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:21:25 crc kubenswrapper[4799]: I0121 18:21:25.554627 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccd4eac7-fb24-4229-b18e-a3749fb82237-kube-api-access-86b5k" (OuterVolumeSpecName: "kube-api-access-86b5k") pod "ccd4eac7-fb24-4229-b18e-a3749fb82237" (UID: "ccd4eac7-fb24-4229-b18e-a3749fb82237"). InnerVolumeSpecName "kube-api-access-86b5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:21:25 crc kubenswrapper[4799]: I0121 18:21:25.623638 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccd4eac7-fb24-4229-b18e-a3749fb82237-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ccd4eac7-fb24-4229-b18e-a3749fb82237" (UID: "ccd4eac7-fb24-4229-b18e-a3749fb82237"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:21:25 crc kubenswrapper[4799]: I0121 18:21:25.650584 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccd4eac7-fb24-4229-b18e-a3749fb82237-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:21:25 crc kubenswrapper[4799]: I0121 18:21:25.650619 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccd4eac7-fb24-4229-b18e-a3749fb82237-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:21:25 crc kubenswrapper[4799]: I0121 18:21:25.650637 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86b5k\" (UniqueName: \"kubernetes.io/projected/ccd4eac7-fb24-4229-b18e-a3749fb82237-kube-api-access-86b5k\") on node \"crc\" DevicePath \"\"" Jan 21 18:21:25 crc kubenswrapper[4799]: I0121 18:21:25.970548 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:21:25 crc kubenswrapper[4799]: I0121 18:21:25.970631 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:21:26 crc kubenswrapper[4799]: I0121 18:21:26.049792 4799 generic.go:334] "Generic (PLEG): container finished" podID="ccd4eac7-fb24-4229-b18e-a3749fb82237" containerID="2734b74bbd30ba72facf2c74e3914189184d28f492375990457b0c44b81ebfb8" exitCode=0 Jan 21 18:21:26 crc kubenswrapper[4799]: I0121 18:21:26.049850 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hq7cs" event={"ID":"ccd4eac7-fb24-4229-b18e-a3749fb82237","Type":"ContainerDied","Data":"2734b74bbd30ba72facf2c74e3914189184d28f492375990457b0c44b81ebfb8"} Jan 21 18:21:26 crc kubenswrapper[4799]: I0121 18:21:26.049879 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hq7cs" event={"ID":"ccd4eac7-fb24-4229-b18e-a3749fb82237","Type":"ContainerDied","Data":"8a09c27a1402d28558d991c493e5e757815d24b4d7ffc563b905db1af9c7b3a5"} Jan 21 18:21:26 crc kubenswrapper[4799]: I0121 18:21:26.049900 4799 scope.go:117] "RemoveContainer" containerID="2734b74bbd30ba72facf2c74e3914189184d28f492375990457b0c44b81ebfb8" Jan 21 18:21:26 crc kubenswrapper[4799]: I0121 18:21:26.050030 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hq7cs" Jan 21 18:21:26 crc kubenswrapper[4799]: I0121 18:21:26.094215 4799 scope.go:117] "RemoveContainer" containerID="b8ec2e55e08d2d1c3e76b7560108b06356265a659e92429a7f4f8e95e7458462" Jan 21 18:21:26 crc kubenswrapper[4799]: I0121 18:21:26.113309 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hq7cs"] Jan 21 18:21:26 crc kubenswrapper[4799]: I0121 18:21:26.122799 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hq7cs"] Jan 21 18:21:26 crc kubenswrapper[4799]: I0121 18:21:26.124724 4799 scope.go:117] "RemoveContainer" containerID="d5af8c28832e63d1dba64229d2631f512905635c65d75dde180c9af46ae4b32d" Jan 21 18:21:26 crc kubenswrapper[4799]: I0121 18:21:26.177018 4799 scope.go:117] "RemoveContainer" containerID="2734b74bbd30ba72facf2c74e3914189184d28f492375990457b0c44b81ebfb8" Jan 21 18:21:26 crc kubenswrapper[4799]: E0121 18:21:26.177611 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2734b74bbd30ba72facf2c74e3914189184d28f492375990457b0c44b81ebfb8\": container with ID starting with 2734b74bbd30ba72facf2c74e3914189184d28f492375990457b0c44b81ebfb8 not found: ID does not exist" containerID="2734b74bbd30ba72facf2c74e3914189184d28f492375990457b0c44b81ebfb8" Jan 21 18:21:26 crc kubenswrapper[4799]: I0121 18:21:26.177645 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2734b74bbd30ba72facf2c74e3914189184d28f492375990457b0c44b81ebfb8"} err="failed to get container status \"2734b74bbd30ba72facf2c74e3914189184d28f492375990457b0c44b81ebfb8\": rpc error: code = NotFound desc = could not find container \"2734b74bbd30ba72facf2c74e3914189184d28f492375990457b0c44b81ebfb8\": container with ID starting with 2734b74bbd30ba72facf2c74e3914189184d28f492375990457b0c44b81ebfb8 not found: ID does not exist" Jan 21 18:21:26 crc kubenswrapper[4799]: I0121 18:21:26.177688 4799 scope.go:117] "RemoveContainer" containerID="b8ec2e55e08d2d1c3e76b7560108b06356265a659e92429a7f4f8e95e7458462" Jan 21 18:21:26 crc kubenswrapper[4799]: E0121 18:21:26.178188 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8ec2e55e08d2d1c3e76b7560108b06356265a659e92429a7f4f8e95e7458462\": container with ID starting with b8ec2e55e08d2d1c3e76b7560108b06356265a659e92429a7f4f8e95e7458462 not found: ID does not exist" containerID="b8ec2e55e08d2d1c3e76b7560108b06356265a659e92429a7f4f8e95e7458462" Jan 21 18:21:26 crc kubenswrapper[4799]: I0121 18:21:26.178221 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8ec2e55e08d2d1c3e76b7560108b06356265a659e92429a7f4f8e95e7458462"} err="failed to get container status \"b8ec2e55e08d2d1c3e76b7560108b06356265a659e92429a7f4f8e95e7458462\": rpc error: code = NotFound desc = could not find container \"b8ec2e55e08d2d1c3e76b7560108b06356265a659e92429a7f4f8e95e7458462\": container with ID starting with b8ec2e55e08d2d1c3e76b7560108b06356265a659e92429a7f4f8e95e7458462 not found: ID does not exist" Jan 21 18:21:26 crc kubenswrapper[4799]: I0121 18:21:26.178241 4799 scope.go:117] "RemoveContainer" containerID="d5af8c28832e63d1dba64229d2631f512905635c65d75dde180c9af46ae4b32d" Jan 21 18:21:26 crc kubenswrapper[4799]: E0121 18:21:26.178581 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d5af8c28832e63d1dba64229d2631f512905635c65d75dde180c9af46ae4b32d\": container with ID starting with d5af8c28832e63d1dba64229d2631f512905635c65d75dde180c9af46ae4b32d not found: ID does not exist" containerID="d5af8c28832e63d1dba64229d2631f512905635c65d75dde180c9af46ae4b32d" Jan 21 18:21:26 crc kubenswrapper[4799]: I0121 18:21:26.178643 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5af8c28832e63d1dba64229d2631f512905635c65d75dde180c9af46ae4b32d"} err="failed to get container status \"d5af8c28832e63d1dba64229d2631f512905635c65d75dde180c9af46ae4b32d\": rpc error: code = NotFound desc = could not find container \"d5af8c28832e63d1dba64229d2631f512905635c65d75dde180c9af46ae4b32d\": container with ID starting with d5af8c28832e63d1dba64229d2631f512905635c65d75dde180c9af46ae4b32d not found: ID does not exist" Jan 21 18:21:26 crc kubenswrapper[4799]: I0121 18:21:26.222223 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccd4eac7-fb24-4229-b18e-a3749fb82237" path="/var/lib/kubelet/pods/ccd4eac7-fb24-4229-b18e-a3749fb82237/volumes" Jan 21 18:21:33 crc kubenswrapper[4799]: I0121 18:21:33.541390 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:33 crc kubenswrapper[4799]: I0121 18:21:33.546788 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:34 crc kubenswrapper[4799]: I0121 18:21:34.157118 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.754173 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Jan 21 18:21:49 crc kubenswrapper[4799]: E0121 18:21:49.755278 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccd4eac7-fb24-4229-b18e-a3749fb82237" containerName="extract-content" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.755293 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccd4eac7-fb24-4229-b18e-a3749fb82237" containerName="extract-content" Jan 21 18:21:49 crc kubenswrapper[4799]: E0121 18:21:49.755325 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccd4eac7-fb24-4229-b18e-a3749fb82237" containerName="registry-server" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.755332 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccd4eac7-fb24-4229-b18e-a3749fb82237" containerName="registry-server" Jan 21 18:21:49 crc kubenswrapper[4799]: E0121 18:21:49.755345 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccd4eac7-fb24-4229-b18e-a3749fb82237" containerName="extract-utilities" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.755352 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccd4eac7-fb24-4229-b18e-a3749fb82237" containerName="extract-utilities" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.755570 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccd4eac7-fb24-4229-b18e-a3749fb82237" containerName="registry-server" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.756294 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.759452 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.759451 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.760708 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-jqx6b" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.760873 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.774634 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.878913 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/384bc0b0-0caa-45e3-b892-155def4ed881-config-data\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.879014 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.879048 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkhww\" (UniqueName: \"kubernetes.io/projected/384bc0b0-0caa-45e3-b892-155def4ed881-kube-api-access-xkhww\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.879067 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/384bc0b0-0caa-45e3-b892-155def4ed881-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.879095 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.879150 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/384bc0b0-0caa-45e3-b892-155def4ed881-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.879355 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: 
\"kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.879423 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.879643 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/384bc0b0-0caa-45e3-b892-155def4ed881-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.981602 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkhww\" (UniqueName: \"kubernetes.io/projected/384bc0b0-0caa-45e3-b892-155def4ed881-kube-api-access-xkhww\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.981662 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/384bc0b0-0caa-45e3-b892-155def4ed881-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.981704 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.981765 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/384bc0b0-0caa-45e3-b892-155def4ed881-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.981818 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.981845 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.981913 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: 
\"kubernetes.io/empty-dir/384bc0b0-0caa-45e3-b892-155def4ed881-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.982027 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/384bc0b0-0caa-45e3-b892-155def4ed881-config-data\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.982113 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.982307 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.982805 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/384bc0b0-0caa-45e3-b892-155def4ed881-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.983281 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/384bc0b0-0caa-45e3-b892-155def4ed881-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.983381 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/384bc0b0-0caa-45e3-b892-155def4ed881-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.988611 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/384bc0b0-0caa-45e3-b892-155def4ed881-config-data\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.995205 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.995810 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: 
\"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:49 crc kubenswrapper[4799]: I0121 18:21:49.996045 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:50 crc kubenswrapper[4799]: I0121 18:21:50.004179 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkhww\" (UniqueName: \"kubernetes.io/projected/384bc0b0-0caa-45e3-b892-155def4ed881-kube-api-access-xkhww\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:50 crc kubenswrapper[4799]: I0121 18:21:50.022518 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " pod="openstack/tempest-tests-tempest" Jan 21 18:21:50 crc kubenswrapper[4799]: I0121 18:21:50.110356 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Jan 21 18:21:50 crc kubenswrapper[4799]: I0121 18:21:50.618509 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Jan 21 18:21:51 crc kubenswrapper[4799]: I0121 18:21:51.332358 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"384bc0b0-0caa-45e3-b892-155def4ed881","Type":"ContainerStarted","Data":"001098c4d5fb85163635be7b8aa48e9ba2cfc4ff50b6ce93cc6bbe508fb6d6c8"} Jan 21 18:21:55 crc kubenswrapper[4799]: I0121 18:21:55.970904 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:21:55 crc kubenswrapper[4799]: I0121 18:21:55.971423 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:21:55 crc kubenswrapper[4799]: I0121 18:21:55.971475 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 18:21:55 crc kubenswrapper[4799]: I0121 18:21:55.972325 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"faf9697308cda2c1909b38aecfe9cbc2b5b80d0041c2146a3678a3b576db2ee5"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:21:55 crc kubenswrapper[4799]: I0121 18:21:55.972381 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" 
containerID="cri-o://faf9697308cda2c1909b38aecfe9cbc2b5b80d0041c2146a3678a3b576db2ee5" gracePeriod=600 Jan 21 18:21:56 crc kubenswrapper[4799]: I0121 18:21:56.380930 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="faf9697308cda2c1909b38aecfe9cbc2b5b80d0041c2146a3678a3b576db2ee5" exitCode=0 Jan 21 18:21:56 crc kubenswrapper[4799]: I0121 18:21:56.380945 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"faf9697308cda2c1909b38aecfe9cbc2b5b80d0041c2146a3678a3b576db2ee5"} Jan 21 18:21:56 crc kubenswrapper[4799]: I0121 18:21:56.381345 4799 scope.go:117] "RemoveContainer" containerID="b0db7b5bbcc92d5e4a0dde32baebcda6cc16d80d611c6a4d0bf684c071ee565b" Jan 21 18:22:00 crc kubenswrapper[4799]: I0121 18:22:00.370729 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Jan 21 18:22:01 crc kubenswrapper[4799]: I0121 18:22:01.437743 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"384bc0b0-0caa-45e3-b892-155def4ed881","Type":"ContainerStarted","Data":"4e1f24192c5c16b509a03af3a966e0e1f354e83f7aca4b01ba0c6702b84caed0"} Jan 21 18:22:01 crc kubenswrapper[4799]: I0121 18:22:01.442223 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d"} Jan 21 18:22:01 crc kubenswrapper[4799]: I0121 18:22:01.462723 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.718446315 podStartE2EDuration="13.462698218s" podCreationTimestamp="2026-01-21 18:21:48 +0000 UTC" firstStartedPulling="2026-01-21 18:21:50.622596226 +0000 UTC m=+2937.248886249" lastFinishedPulling="2026-01-21 18:22:00.366848129 +0000 UTC m=+2946.993138152" observedRunningTime="2026-01-21 18:22:01.45714763 +0000 UTC m=+2948.083437663" watchObservedRunningTime="2026-01-21 18:22:01.462698218 +0000 UTC m=+2948.088988251" Jan 21 18:22:05 crc kubenswrapper[4799]: I0121 18:22:05.135220 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-c2vxb"] Jan 21 18:22:05 crc kubenswrapper[4799]: I0121 18:22:05.141533 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:05 crc kubenswrapper[4799]: I0121 18:22:05.152191 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c2vxb"] Jan 21 18:22:05 crc kubenswrapper[4799]: I0121 18:22:05.224927 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d747413-62f7-4d5e-be6e-9be00e3c279a-utilities\") pod \"community-operators-c2vxb\" (UID: \"0d747413-62f7-4d5e-be6e-9be00e3c279a\") " pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:05 crc kubenswrapper[4799]: I0121 18:22:05.224974 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89fhh\" (UniqueName: \"kubernetes.io/projected/0d747413-62f7-4d5e-be6e-9be00e3c279a-kube-api-access-89fhh\") pod \"community-operators-c2vxb\" (UID: \"0d747413-62f7-4d5e-be6e-9be00e3c279a\") " pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:05 crc kubenswrapper[4799]: I0121 18:22:05.225190 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d747413-62f7-4d5e-be6e-9be00e3c279a-catalog-content\") pod \"community-operators-c2vxb\" (UID: \"0d747413-62f7-4d5e-be6e-9be00e3c279a\") " pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:05 crc kubenswrapper[4799]: I0121 18:22:05.327182 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89fhh\" (UniqueName: \"kubernetes.io/projected/0d747413-62f7-4d5e-be6e-9be00e3c279a-kube-api-access-89fhh\") pod \"community-operators-c2vxb\" (UID: \"0d747413-62f7-4d5e-be6e-9be00e3c279a\") " pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:05 crc kubenswrapper[4799]: I0121 18:22:05.327460 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d747413-62f7-4d5e-be6e-9be00e3c279a-catalog-content\") pod \"community-operators-c2vxb\" (UID: \"0d747413-62f7-4d5e-be6e-9be00e3c279a\") " pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:05 crc kubenswrapper[4799]: I0121 18:22:05.327598 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d747413-62f7-4d5e-be6e-9be00e3c279a-utilities\") pod \"community-operators-c2vxb\" (UID: \"0d747413-62f7-4d5e-be6e-9be00e3c279a\") " pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:05 crc kubenswrapper[4799]: I0121 18:22:05.328191 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d747413-62f7-4d5e-be6e-9be00e3c279a-catalog-content\") pod \"community-operators-c2vxb\" (UID: \"0d747413-62f7-4d5e-be6e-9be00e3c279a\") " pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:05 crc kubenswrapper[4799]: I0121 18:22:05.328318 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d747413-62f7-4d5e-be6e-9be00e3c279a-utilities\") pod \"community-operators-c2vxb\" (UID: \"0d747413-62f7-4d5e-be6e-9be00e3c279a\") " pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:05 crc kubenswrapper[4799]: I0121 18:22:05.358157 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-89fhh\" (UniqueName: \"kubernetes.io/projected/0d747413-62f7-4d5e-be6e-9be00e3c279a-kube-api-access-89fhh\") pod \"community-operators-c2vxb\" (UID: \"0d747413-62f7-4d5e-be6e-9be00e3c279a\") " pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:05 crc kubenswrapper[4799]: I0121 18:22:05.469432 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:06 crc kubenswrapper[4799]: I0121 18:22:06.111581 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c2vxb"] Jan 21 18:22:06 crc kubenswrapper[4799]: W0121 18:22:06.119827 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d747413_62f7_4d5e_be6e_9be00e3c279a.slice/crio-7c4ae856a1763ac058ac1c897e95a3fd2618c37004be5ab7272f6489b56ddce4 WatchSource:0}: Error finding container 7c4ae856a1763ac058ac1c897e95a3fd2618c37004be5ab7272f6489b56ddce4: Status 404 returned error can't find the container with id 7c4ae856a1763ac058ac1c897e95a3fd2618c37004be5ab7272f6489b56ddce4 Jan 21 18:22:06 crc kubenswrapper[4799]: I0121 18:22:06.495332 4799 generic.go:334] "Generic (PLEG): container finished" podID="0d747413-62f7-4d5e-be6e-9be00e3c279a" containerID="8f5fb2662d735daf20583d439ca75ba0c4cecac32ea881e2fcf15d7f973518a7" exitCode=0 Jan 21 18:22:06 crc kubenswrapper[4799]: I0121 18:22:06.495394 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c2vxb" event={"ID":"0d747413-62f7-4d5e-be6e-9be00e3c279a","Type":"ContainerDied","Data":"8f5fb2662d735daf20583d439ca75ba0c4cecac32ea881e2fcf15d7f973518a7"} Jan 21 18:22:06 crc kubenswrapper[4799]: I0121 18:22:06.495436 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c2vxb" event={"ID":"0d747413-62f7-4d5e-be6e-9be00e3c279a","Type":"ContainerStarted","Data":"7c4ae856a1763ac058ac1c897e95a3fd2618c37004be5ab7272f6489b56ddce4"} Jan 21 18:22:06 crc kubenswrapper[4799]: I0121 18:22:06.497867 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:22:07 crc kubenswrapper[4799]: I0121 18:22:07.507140 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c2vxb" event={"ID":"0d747413-62f7-4d5e-be6e-9be00e3c279a","Type":"ContainerStarted","Data":"777fe96c01f011b473c765d555fe0c86d843a7ada9fec0b050e4dc97f9a2c66a"} Jan 21 18:22:09 crc kubenswrapper[4799]: I0121 18:22:09.530393 4799 generic.go:334] "Generic (PLEG): container finished" podID="0d747413-62f7-4d5e-be6e-9be00e3c279a" containerID="777fe96c01f011b473c765d555fe0c86d843a7ada9fec0b050e4dc97f9a2c66a" exitCode=0 Jan 21 18:22:09 crc kubenswrapper[4799]: I0121 18:22:09.530482 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c2vxb" event={"ID":"0d747413-62f7-4d5e-be6e-9be00e3c279a","Type":"ContainerDied","Data":"777fe96c01f011b473c765d555fe0c86d843a7ada9fec0b050e4dc97f9a2c66a"} Jan 21 18:22:10 crc kubenswrapper[4799]: I0121 18:22:10.543976 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c2vxb" event={"ID":"0d747413-62f7-4d5e-be6e-9be00e3c279a","Type":"ContainerStarted","Data":"8cf219afd1cdf03e2cf24e710a3af7adcf2ae340261d2a0a3d7ca96542561668"} Jan 21 18:22:10 crc kubenswrapper[4799]: I0121 
18:22:10.572763 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-c2vxb" podStartSLOduration=2.146007057 podStartE2EDuration="5.572738719s" podCreationTimestamp="2026-01-21 18:22:05 +0000 UTC" firstStartedPulling="2026-01-21 18:22:06.497647481 +0000 UTC m=+2953.123937504" lastFinishedPulling="2026-01-21 18:22:09.924379143 +0000 UTC m=+2956.550669166" observedRunningTime="2026-01-21 18:22:10.562604541 +0000 UTC m=+2957.188894624" watchObservedRunningTime="2026-01-21 18:22:10.572738719 +0000 UTC m=+2957.199028742" Jan 21 18:22:15 crc kubenswrapper[4799]: I0121 18:22:15.470326 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:15 crc kubenswrapper[4799]: I0121 18:22:15.471009 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:15 crc kubenswrapper[4799]: I0121 18:22:15.524886 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:15 crc kubenswrapper[4799]: I0121 18:22:15.644104 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:15 crc kubenswrapper[4799]: I0121 18:22:15.763882 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c2vxb"] Jan 21 18:22:17 crc kubenswrapper[4799]: I0121 18:22:17.615648 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-c2vxb" podUID="0d747413-62f7-4d5e-be6e-9be00e3c279a" containerName="registry-server" containerID="cri-o://8cf219afd1cdf03e2cf24e710a3af7adcf2ae340261d2a0a3d7ca96542561668" gracePeriod=2 Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.166034 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.338788 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d747413-62f7-4d5e-be6e-9be00e3c279a-catalog-content\") pod \"0d747413-62f7-4d5e-be6e-9be00e3c279a\" (UID: \"0d747413-62f7-4d5e-be6e-9be00e3c279a\") " Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.338923 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89fhh\" (UniqueName: \"kubernetes.io/projected/0d747413-62f7-4d5e-be6e-9be00e3c279a-kube-api-access-89fhh\") pod \"0d747413-62f7-4d5e-be6e-9be00e3c279a\" (UID: \"0d747413-62f7-4d5e-be6e-9be00e3c279a\") " Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.339150 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d747413-62f7-4d5e-be6e-9be00e3c279a-utilities\") pod \"0d747413-62f7-4d5e-be6e-9be00e3c279a\" (UID: \"0d747413-62f7-4d5e-be6e-9be00e3c279a\") " Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.340705 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d747413-62f7-4d5e-be6e-9be00e3c279a-utilities" (OuterVolumeSpecName: "utilities") pod "0d747413-62f7-4d5e-be6e-9be00e3c279a" (UID: "0d747413-62f7-4d5e-be6e-9be00e3c279a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.352514 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d747413-62f7-4d5e-be6e-9be00e3c279a-kube-api-access-89fhh" (OuterVolumeSpecName: "kube-api-access-89fhh") pod "0d747413-62f7-4d5e-be6e-9be00e3c279a" (UID: "0d747413-62f7-4d5e-be6e-9be00e3c279a"). InnerVolumeSpecName "kube-api-access-89fhh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.393283 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d747413-62f7-4d5e-be6e-9be00e3c279a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0d747413-62f7-4d5e-be6e-9be00e3c279a" (UID: "0d747413-62f7-4d5e-be6e-9be00e3c279a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.441850 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d747413-62f7-4d5e-be6e-9be00e3c279a-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.441887 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d747413-62f7-4d5e-be6e-9be00e3c279a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.441902 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89fhh\" (UniqueName: \"kubernetes.io/projected/0d747413-62f7-4d5e-be6e-9be00e3c279a-kube-api-access-89fhh\") on node \"crc\" DevicePath \"\"" Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.627931 4799 generic.go:334] "Generic (PLEG): container finished" podID="0d747413-62f7-4d5e-be6e-9be00e3c279a" containerID="8cf219afd1cdf03e2cf24e710a3af7adcf2ae340261d2a0a3d7ca96542561668" exitCode=0 Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.627984 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c2vxb" event={"ID":"0d747413-62f7-4d5e-be6e-9be00e3c279a","Type":"ContainerDied","Data":"8cf219afd1cdf03e2cf24e710a3af7adcf2ae340261d2a0a3d7ca96542561668"} Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.628015 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c2vxb" event={"ID":"0d747413-62f7-4d5e-be6e-9be00e3c279a","Type":"ContainerDied","Data":"7c4ae856a1763ac058ac1c897e95a3fd2618c37004be5ab7272f6489b56ddce4"} Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.628034 4799 scope.go:117] "RemoveContainer" containerID="8cf219afd1cdf03e2cf24e710a3af7adcf2ae340261d2a0a3d7ca96542561668" Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.628079 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c2vxb" Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.651320 4799 scope.go:117] "RemoveContainer" containerID="777fe96c01f011b473c765d555fe0c86d843a7ada9fec0b050e4dc97f9a2c66a" Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.667465 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c2vxb"] Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.676239 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-c2vxb"] Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.698982 4799 scope.go:117] "RemoveContainer" containerID="8f5fb2662d735daf20583d439ca75ba0c4cecac32ea881e2fcf15d7f973518a7" Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.747789 4799 scope.go:117] "RemoveContainer" containerID="8cf219afd1cdf03e2cf24e710a3af7adcf2ae340261d2a0a3d7ca96542561668" Jan 21 18:22:18 crc kubenswrapper[4799]: E0121 18:22:18.750815 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cf219afd1cdf03e2cf24e710a3af7adcf2ae340261d2a0a3d7ca96542561668\": container with ID starting with 8cf219afd1cdf03e2cf24e710a3af7adcf2ae340261d2a0a3d7ca96542561668 not found: ID does not exist" containerID="8cf219afd1cdf03e2cf24e710a3af7adcf2ae340261d2a0a3d7ca96542561668" Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.750877 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cf219afd1cdf03e2cf24e710a3af7adcf2ae340261d2a0a3d7ca96542561668"} err="failed to get container status \"8cf219afd1cdf03e2cf24e710a3af7adcf2ae340261d2a0a3d7ca96542561668\": rpc error: code = NotFound desc = could not find container \"8cf219afd1cdf03e2cf24e710a3af7adcf2ae340261d2a0a3d7ca96542561668\": container with ID starting with 8cf219afd1cdf03e2cf24e710a3af7adcf2ae340261d2a0a3d7ca96542561668 not found: ID does not exist" Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.750916 4799 scope.go:117] "RemoveContainer" containerID="777fe96c01f011b473c765d555fe0c86d843a7ada9fec0b050e4dc97f9a2c66a" Jan 21 18:22:18 crc kubenswrapper[4799]: E0121 18:22:18.754678 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"777fe96c01f011b473c765d555fe0c86d843a7ada9fec0b050e4dc97f9a2c66a\": container with ID starting with 777fe96c01f011b473c765d555fe0c86d843a7ada9fec0b050e4dc97f9a2c66a not found: ID does not exist" containerID="777fe96c01f011b473c765d555fe0c86d843a7ada9fec0b050e4dc97f9a2c66a" Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.754708 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"777fe96c01f011b473c765d555fe0c86d843a7ada9fec0b050e4dc97f9a2c66a"} err="failed to get container status \"777fe96c01f011b473c765d555fe0c86d843a7ada9fec0b050e4dc97f9a2c66a\": rpc error: code = NotFound desc = could not find container \"777fe96c01f011b473c765d555fe0c86d843a7ada9fec0b050e4dc97f9a2c66a\": container with ID starting with 777fe96c01f011b473c765d555fe0c86d843a7ada9fec0b050e4dc97f9a2c66a not found: ID does not exist" Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.754729 4799 scope.go:117] "RemoveContainer" containerID="8f5fb2662d735daf20583d439ca75ba0c4cecac32ea881e2fcf15d7f973518a7" Jan 21 18:22:18 crc kubenswrapper[4799]: E0121 18:22:18.756637 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"8f5fb2662d735daf20583d439ca75ba0c4cecac32ea881e2fcf15d7f973518a7\": container with ID starting with 8f5fb2662d735daf20583d439ca75ba0c4cecac32ea881e2fcf15d7f973518a7 not found: ID does not exist" containerID="8f5fb2662d735daf20583d439ca75ba0c4cecac32ea881e2fcf15d7f973518a7" Jan 21 18:22:18 crc kubenswrapper[4799]: I0121 18:22:18.756696 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f5fb2662d735daf20583d439ca75ba0c4cecac32ea881e2fcf15d7f973518a7"} err="failed to get container status \"8f5fb2662d735daf20583d439ca75ba0c4cecac32ea881e2fcf15d7f973518a7\": rpc error: code = NotFound desc = could not find container \"8f5fb2662d735daf20583d439ca75ba0c4cecac32ea881e2fcf15d7f973518a7\": container with ID starting with 8f5fb2662d735daf20583d439ca75ba0c4cecac32ea881e2fcf15d7f973518a7 not found: ID does not exist" Jan 21 18:22:20 crc kubenswrapper[4799]: I0121 18:22:20.219810 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d747413-62f7-4d5e-be6e-9be00e3c279a" path="/var/lib/kubelet/pods/0d747413-62f7-4d5e-be6e-9be00e3c279a/volumes" Jan 21 18:24:25 crc kubenswrapper[4799]: I0121 18:24:25.970952 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:24:25 crc kubenswrapper[4799]: I0121 18:24:25.971517 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:24:55 crc kubenswrapper[4799]: I0121 18:24:55.971412 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:24:55 crc kubenswrapper[4799]: I0121 18:24:55.972003 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:25:25 crc kubenswrapper[4799]: I0121 18:25:25.970730 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:25:25 crc kubenswrapper[4799]: I0121 18:25:25.971235 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:25:25 crc kubenswrapper[4799]: I0121 18:25:25.971291 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 18:25:25 crc kubenswrapper[4799]: I0121 18:25:25.972066 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:25:25 crc kubenswrapper[4799]: I0121 18:25:25.972167 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" gracePeriod=600 Jan 21 18:25:26 crc kubenswrapper[4799]: E0121 18:25:26.101177 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:25:26 crc kubenswrapper[4799]: I0121 18:25:26.699379 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" exitCode=0 Jan 21 18:25:26 crc kubenswrapper[4799]: I0121 18:25:26.699606 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d"} Jan 21 18:25:26 crc kubenswrapper[4799]: I0121 18:25:26.699669 4799 scope.go:117] "RemoveContainer" containerID="faf9697308cda2c1909b38aecfe9cbc2b5b80d0041c2146a3678a3b576db2ee5" Jan 21 18:25:26 crc kubenswrapper[4799]: I0121 18:25:26.700385 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:25:26 crc kubenswrapper[4799]: E0121 18:25:26.700724 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:25:37 crc kubenswrapper[4799]: I0121 18:25:37.205360 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:25:37 crc kubenswrapper[4799]: E0121 18:25:37.206151 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:25:52 crc 
kubenswrapper[4799]: I0121 18:25:52.206188 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:25:52 crc kubenswrapper[4799]: E0121 18:25:52.206977 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:26:07 crc kubenswrapper[4799]: I0121 18:26:07.205483 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:26:07 crc kubenswrapper[4799]: E0121 18:26:07.206537 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:26:20 crc kubenswrapper[4799]: I0121 18:26:20.205392 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:26:20 crc kubenswrapper[4799]: E0121 18:26:20.206086 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:26:31 crc kubenswrapper[4799]: I0121 18:26:31.205365 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:26:31 crc kubenswrapper[4799]: E0121 18:26:31.206410 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:26:44 crc kubenswrapper[4799]: I0121 18:26:44.216709 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:26:44 crc kubenswrapper[4799]: E0121 18:26:44.217821 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:26:59 crc kubenswrapper[4799]: I0121 18:26:59.205704 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:26:59 crc 
kubenswrapper[4799]: E0121 18:26:59.208761 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:27:10 crc kubenswrapper[4799]: I0121 18:27:10.205659 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:27:10 crc kubenswrapper[4799]: E0121 18:27:10.207034 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:27:22 crc kubenswrapper[4799]: I0121 18:27:22.205159 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:27:22 crc kubenswrapper[4799]: E0121 18:27:22.205844 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:27:35 crc kubenswrapper[4799]: I0121 18:27:35.205809 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:27:35 crc kubenswrapper[4799]: E0121 18:27:35.206870 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:27:47 crc kubenswrapper[4799]: I0121 18:27:47.205715 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:27:47 crc kubenswrapper[4799]: E0121 18:27:47.206708 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:27:59 crc kubenswrapper[4799]: I0121 18:27:59.205082 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:27:59 crc kubenswrapper[4799]: E0121 18:27:59.205804 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:28:14 crc kubenswrapper[4799]: I0121 18:28:14.214170 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:28:14 crc kubenswrapper[4799]: E0121 18:28:14.215439 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:28:28 crc kubenswrapper[4799]: I0121 18:28:28.205786 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:28:28 crc kubenswrapper[4799]: E0121 18:28:28.206687 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:28:43 crc kubenswrapper[4799]: I0121 18:28:43.205794 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:28:43 crc kubenswrapper[4799]: E0121 18:28:43.206532 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.587415 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rccwj"] Jan 21 18:28:47 crc kubenswrapper[4799]: E0121 18:28:47.588616 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d747413-62f7-4d5e-be6e-9be00e3c279a" containerName="extract-content" Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.588635 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d747413-62f7-4d5e-be6e-9be00e3c279a" containerName="extract-content" Jan 21 18:28:47 crc kubenswrapper[4799]: E0121 18:28:47.588668 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d747413-62f7-4d5e-be6e-9be00e3c279a" containerName="extract-utilities" Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.588677 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d747413-62f7-4d5e-be6e-9be00e3c279a" containerName="extract-utilities" Jan 21 18:28:47 crc kubenswrapper[4799]: E0121 18:28:47.588708 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d747413-62f7-4d5e-be6e-9be00e3c279a" containerName="registry-server" Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.588716 4799 
state_mem.go:107] "Deleted CPUSet assignment" podUID="0d747413-62f7-4d5e-be6e-9be00e3c279a" containerName="registry-server" Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.588997 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d747413-62f7-4d5e-be6e-9be00e3c279a" containerName="registry-server" Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.590992 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.606908 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rccwj"] Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.700257 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-utilities\") pod \"redhat-operators-rccwj\" (UID: \"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88\") " pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.700346 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlx5r\" (UniqueName: \"kubernetes.io/projected/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-kube-api-access-dlx5r\") pod \"redhat-operators-rccwj\" (UID: \"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88\") " pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.700714 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-catalog-content\") pod \"redhat-operators-rccwj\" (UID: \"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88\") " pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.803048 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-utilities\") pod \"redhat-operators-rccwj\" (UID: \"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88\") " pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.803118 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlx5r\" (UniqueName: \"kubernetes.io/projected/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-kube-api-access-dlx5r\") pod \"redhat-operators-rccwj\" (UID: \"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88\") " pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.803198 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-catalog-content\") pod \"redhat-operators-rccwj\" (UID: \"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88\") " pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.803767 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-catalog-content\") pod \"redhat-operators-rccwj\" (UID: \"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88\") " pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.804024 4799 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-utilities\") pod \"redhat-operators-rccwj\" (UID: \"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88\") " pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.828326 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlx5r\" (UniqueName: \"kubernetes.io/projected/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-kube-api-access-dlx5r\") pod \"redhat-operators-rccwj\" (UID: \"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88\") " pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:28:47 crc kubenswrapper[4799]: I0121 18:28:47.929561 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:28:48 crc kubenswrapper[4799]: I0121 18:28:48.429348 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rccwj"] Jan 21 18:28:48 crc kubenswrapper[4799]: I0121 18:28:48.839009 4799 generic.go:334] "Generic (PLEG): container finished" podID="b7a77c06-12b4-48e5-b1bf-9772a4b8bb88" containerID="38bdbcf54bf0515693247e8ec8f707a8e3015ae691b0ee3fddbff2a2d199c3dd" exitCode=0 Jan 21 18:28:48 crc kubenswrapper[4799]: I0121 18:28:48.839102 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rccwj" event={"ID":"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88","Type":"ContainerDied","Data":"38bdbcf54bf0515693247e8ec8f707a8e3015ae691b0ee3fddbff2a2d199c3dd"} Jan 21 18:28:48 crc kubenswrapper[4799]: I0121 18:28:48.839317 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rccwj" event={"ID":"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88","Type":"ContainerStarted","Data":"833facd3f51fb7d35d211dd7723e8e4b99cd67d38649d6b5e0d5308e040e736d"} Jan 21 18:28:48 crc kubenswrapper[4799]: I0121 18:28:48.841363 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:28:50 crc kubenswrapper[4799]: I0121 18:28:50.864339 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rccwj" event={"ID":"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88","Type":"ContainerStarted","Data":"b3c67a2613e4d212ae0ec4281abf237e5fa445118049d02dc20310cc132c0003"} Jan 21 18:28:54 crc kubenswrapper[4799]: I0121 18:28:54.212544 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:28:54 crc kubenswrapper[4799]: E0121 18:28:54.213311 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:28:54 crc kubenswrapper[4799]: I0121 18:28:54.909674 4799 generic.go:334] "Generic (PLEG): container finished" podID="b7a77c06-12b4-48e5-b1bf-9772a4b8bb88" containerID="b3c67a2613e4d212ae0ec4281abf237e5fa445118049d02dc20310cc132c0003" exitCode=0 Jan 21 18:28:54 crc kubenswrapper[4799]: I0121 18:28:54.909740 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rccwj" 
event={"ID":"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88","Type":"ContainerDied","Data":"b3c67a2613e4d212ae0ec4281abf237e5fa445118049d02dc20310cc132c0003"} Jan 21 18:28:56 crc kubenswrapper[4799]: I0121 18:28:56.494581 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cskdc"] Jan 21 18:28:56 crc kubenswrapper[4799]: I0121 18:28:56.497760 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:28:56 crc kubenswrapper[4799]: I0121 18:28:56.515228 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cskdc"] Jan 21 18:28:56 crc kubenswrapper[4799]: I0121 18:28:56.626690 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1de55946-516e-4730-bc05-e43700652044-catalog-content\") pod \"redhat-marketplace-cskdc\" (UID: \"1de55946-516e-4730-bc05-e43700652044\") " pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:28:56 crc kubenswrapper[4799]: I0121 18:28:56.627093 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1de55946-516e-4730-bc05-e43700652044-utilities\") pod \"redhat-marketplace-cskdc\" (UID: \"1de55946-516e-4730-bc05-e43700652044\") " pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:28:56 crc kubenswrapper[4799]: I0121 18:28:56.627222 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8kfw\" (UniqueName: \"kubernetes.io/projected/1de55946-516e-4730-bc05-e43700652044-kube-api-access-z8kfw\") pod \"redhat-marketplace-cskdc\" (UID: \"1de55946-516e-4730-bc05-e43700652044\") " pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:28:56 crc kubenswrapper[4799]: I0121 18:28:56.728920 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8kfw\" (UniqueName: \"kubernetes.io/projected/1de55946-516e-4730-bc05-e43700652044-kube-api-access-z8kfw\") pod \"redhat-marketplace-cskdc\" (UID: \"1de55946-516e-4730-bc05-e43700652044\") " pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:28:56 crc kubenswrapper[4799]: I0121 18:28:56.729104 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1de55946-516e-4730-bc05-e43700652044-catalog-content\") pod \"redhat-marketplace-cskdc\" (UID: \"1de55946-516e-4730-bc05-e43700652044\") " pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:28:56 crc kubenswrapper[4799]: I0121 18:28:56.729216 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1de55946-516e-4730-bc05-e43700652044-utilities\") pod \"redhat-marketplace-cskdc\" (UID: \"1de55946-516e-4730-bc05-e43700652044\") " pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:28:56 crc kubenswrapper[4799]: I0121 18:28:56.729723 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1de55946-516e-4730-bc05-e43700652044-catalog-content\") pod \"redhat-marketplace-cskdc\" (UID: \"1de55946-516e-4730-bc05-e43700652044\") " pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:28:56 crc kubenswrapper[4799]: I0121 
18:28:56.729785 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1de55946-516e-4730-bc05-e43700652044-utilities\") pod \"redhat-marketplace-cskdc\" (UID: \"1de55946-516e-4730-bc05-e43700652044\") " pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:28:56 crc kubenswrapper[4799]: I0121 18:28:56.761877 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8kfw\" (UniqueName: \"kubernetes.io/projected/1de55946-516e-4730-bc05-e43700652044-kube-api-access-z8kfw\") pod \"redhat-marketplace-cskdc\" (UID: \"1de55946-516e-4730-bc05-e43700652044\") " pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:28:56 crc kubenswrapper[4799]: I0121 18:28:56.854138 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:28:57 crc kubenswrapper[4799]: I0121 18:28:57.458016 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cskdc"] Jan 21 18:28:57 crc kubenswrapper[4799]: I0121 18:28:57.962637 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cskdc" event={"ID":"1de55946-516e-4730-bc05-e43700652044","Type":"ContainerStarted","Data":"5427ee8d25ae1e6dde9093c2d55376e761ce03fd0d4a6f1ad5e7381e7d0c3ea4"} Jan 21 18:28:59 crc kubenswrapper[4799]: I0121 18:28:59.984337 4799 generic.go:334] "Generic (PLEG): container finished" podID="1de55946-516e-4730-bc05-e43700652044" containerID="12609855a4107d951002a6887a61b4dc689b84b9ffdd6425aad6cf092d9d301c" exitCode=0 Jan 21 18:28:59 crc kubenswrapper[4799]: I0121 18:28:59.984454 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cskdc" event={"ID":"1de55946-516e-4730-bc05-e43700652044","Type":"ContainerDied","Data":"12609855a4107d951002a6887a61b4dc689b84b9ffdd6425aad6cf092d9d301c"} Jan 21 18:29:02 crc kubenswrapper[4799]: I0121 18:29:02.011367 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rccwj" event={"ID":"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88","Type":"ContainerStarted","Data":"706fd70e28c39a6ec282311344cbfa6f7f4d245178fe15d424a8fbf7c7daecc2"} Jan 21 18:29:03 crc kubenswrapper[4799]: I0121 18:29:03.026300 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cskdc" event={"ID":"1de55946-516e-4730-bc05-e43700652044","Type":"ContainerStarted","Data":"a276bf19250438ea51081235475f53f979c891ef5c3ba1df7583559fd54232d6"} Jan 21 18:29:03 crc kubenswrapper[4799]: I0121 18:29:03.084589 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rccwj" podStartSLOduration=3.633103445 podStartE2EDuration="16.084566717s" podCreationTimestamp="2026-01-21 18:28:47 +0000 UTC" firstStartedPulling="2026-01-21 18:28:48.841104811 +0000 UTC m=+3355.467394834" lastFinishedPulling="2026-01-21 18:29:01.292568083 +0000 UTC m=+3367.918858106" observedRunningTime="2026-01-21 18:29:03.057691688 +0000 UTC m=+3369.683981721" watchObservedRunningTime="2026-01-21 18:29:03.084566717 +0000 UTC m=+3369.710856740" Jan 21 18:29:04 crc kubenswrapper[4799]: I0121 18:29:04.042041 4799 generic.go:334] "Generic (PLEG): container finished" podID="1de55946-516e-4730-bc05-e43700652044" containerID="a276bf19250438ea51081235475f53f979c891ef5c3ba1df7583559fd54232d6" exitCode=0 Jan 21 18:29:04 crc 
kubenswrapper[4799]: I0121 18:29:04.042119 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cskdc" event={"ID":"1de55946-516e-4730-bc05-e43700652044","Type":"ContainerDied","Data":"a276bf19250438ea51081235475f53f979c891ef5c3ba1df7583559fd54232d6"} Jan 21 18:29:05 crc kubenswrapper[4799]: I0121 18:29:05.061888 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cskdc" event={"ID":"1de55946-516e-4730-bc05-e43700652044","Type":"ContainerStarted","Data":"e2e8e6be9f5e813fe1d7430baab96e36fde1b5b255e7b3c07242955ddc362a4b"} Jan 21 18:29:05 crc kubenswrapper[4799]: I0121 18:29:05.085982 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cskdc" podStartSLOduration=5.939936022 podStartE2EDuration="9.085961891s" podCreationTimestamp="2026-01-21 18:28:56 +0000 UTC" firstStartedPulling="2026-01-21 18:29:01.290550625 +0000 UTC m=+3367.916840648" lastFinishedPulling="2026-01-21 18:29:04.436576494 +0000 UTC m=+3371.062866517" observedRunningTime="2026-01-21 18:29:05.080390791 +0000 UTC m=+3371.706680834" watchObservedRunningTime="2026-01-21 18:29:05.085961891 +0000 UTC m=+3371.712251914" Jan 21 18:29:06 crc kubenswrapper[4799]: I0121 18:29:06.205494 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:29:06 crc kubenswrapper[4799]: E0121 18:29:06.206166 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:29:06 crc kubenswrapper[4799]: I0121 18:29:06.854722 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:29:06 crc kubenswrapper[4799]: I0121 18:29:06.854775 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:29:06 crc kubenswrapper[4799]: I0121 18:29:06.914629 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:29:07 crc kubenswrapper[4799]: I0121 18:29:07.929768 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:29:07 crc kubenswrapper[4799]: I0121 18:29:07.932052 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:29:08 crc kubenswrapper[4799]: I0121 18:29:08.976653 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rccwj" podUID="b7a77c06-12b4-48e5-b1bf-9772a4b8bb88" containerName="registry-server" probeResult="failure" output=< Jan 21 18:29:08 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Jan 21 18:29:08 crc kubenswrapper[4799]: > Jan 21 18:29:16 crc kubenswrapper[4799]: I0121 18:29:16.909606 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:29:16 crc kubenswrapper[4799]: I0121 18:29:16.978478 4799 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cskdc"] Jan 21 18:29:17 crc kubenswrapper[4799]: I0121 18:29:17.185825 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cskdc" podUID="1de55946-516e-4730-bc05-e43700652044" containerName="registry-server" containerID="cri-o://e2e8e6be9f5e813fe1d7430baab96e36fde1b5b255e7b3c07242955ddc362a4b" gracePeriod=2 Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:17.724033 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:17.825379 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8kfw\" (UniqueName: \"kubernetes.io/projected/1de55946-516e-4730-bc05-e43700652044-kube-api-access-z8kfw\") pod \"1de55946-516e-4730-bc05-e43700652044\" (UID: \"1de55946-516e-4730-bc05-e43700652044\") " Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:17.825556 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1de55946-516e-4730-bc05-e43700652044-catalog-content\") pod \"1de55946-516e-4730-bc05-e43700652044\" (UID: \"1de55946-516e-4730-bc05-e43700652044\") " Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:17.825589 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1de55946-516e-4730-bc05-e43700652044-utilities\") pod \"1de55946-516e-4730-bc05-e43700652044\" (UID: \"1de55946-516e-4730-bc05-e43700652044\") " Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:17.826398 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1de55946-516e-4730-bc05-e43700652044-utilities" (OuterVolumeSpecName: "utilities") pod "1de55946-516e-4730-bc05-e43700652044" (UID: "1de55946-516e-4730-bc05-e43700652044"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:17.832380 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1de55946-516e-4730-bc05-e43700652044-kube-api-access-z8kfw" (OuterVolumeSpecName: "kube-api-access-z8kfw") pod "1de55946-516e-4730-bc05-e43700652044" (UID: "1de55946-516e-4730-bc05-e43700652044"). InnerVolumeSpecName "kube-api-access-z8kfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:17.848220 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1de55946-516e-4730-bc05-e43700652044-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1de55946-516e-4730-bc05-e43700652044" (UID: "1de55946-516e-4730-bc05-e43700652044"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:17.928534 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1de55946-516e-4730-bc05-e43700652044-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:17.928562 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1de55946-516e-4730-bc05-e43700652044-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:17.928572 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8kfw\" (UniqueName: \"kubernetes.io/projected/1de55946-516e-4730-bc05-e43700652044-kube-api-access-z8kfw\") on node \"crc\" DevicePath \"\"" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:17.987920 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:18.037646 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:18.197842 4799 generic.go:334] "Generic (PLEG): container finished" podID="1de55946-516e-4730-bc05-e43700652044" containerID="e2e8e6be9f5e813fe1d7430baab96e36fde1b5b255e7b3c07242955ddc362a4b" exitCode=0 Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:18.197934 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cskdc" event={"ID":"1de55946-516e-4730-bc05-e43700652044","Type":"ContainerDied","Data":"e2e8e6be9f5e813fe1d7430baab96e36fde1b5b255e7b3c07242955ddc362a4b"} Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:18.197931 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cskdc" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:18.197981 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cskdc" event={"ID":"1de55946-516e-4730-bc05-e43700652044","Type":"ContainerDied","Data":"5427ee8d25ae1e6dde9093c2d55376e761ce03fd0d4a6f1ad5e7381e7d0c3ea4"} Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:18.198007 4799 scope.go:117] "RemoveContainer" containerID="e2e8e6be9f5e813fe1d7430baab96e36fde1b5b255e7b3c07242955ddc362a4b" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:18.230655 4799 scope.go:117] "RemoveContainer" containerID="a276bf19250438ea51081235475f53f979c891ef5c3ba1df7583559fd54232d6" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:18.256238 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cskdc"] Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:18.259568 4799 scope.go:117] "RemoveContainer" containerID="12609855a4107d951002a6887a61b4dc689b84b9ffdd6425aad6cf092d9d301c" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:18.272730 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cskdc"] Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:18.313188 4799 scope.go:117] "RemoveContainer" containerID="e2e8e6be9f5e813fe1d7430baab96e36fde1b5b255e7b3c07242955ddc362a4b" Jan 21 18:29:18 crc kubenswrapper[4799]: E0121 18:29:18.313641 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2e8e6be9f5e813fe1d7430baab96e36fde1b5b255e7b3c07242955ddc362a4b\": container with ID starting with e2e8e6be9f5e813fe1d7430baab96e36fde1b5b255e7b3c07242955ddc362a4b not found: ID does not exist" containerID="e2e8e6be9f5e813fe1d7430baab96e36fde1b5b255e7b3c07242955ddc362a4b" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:18.313675 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2e8e6be9f5e813fe1d7430baab96e36fde1b5b255e7b3c07242955ddc362a4b"} err="failed to get container status \"e2e8e6be9f5e813fe1d7430baab96e36fde1b5b255e7b3c07242955ddc362a4b\": rpc error: code = NotFound desc = could not find container \"e2e8e6be9f5e813fe1d7430baab96e36fde1b5b255e7b3c07242955ddc362a4b\": container with ID starting with e2e8e6be9f5e813fe1d7430baab96e36fde1b5b255e7b3c07242955ddc362a4b not found: ID does not exist" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:18.313703 4799 scope.go:117] "RemoveContainer" containerID="a276bf19250438ea51081235475f53f979c891ef5c3ba1df7583559fd54232d6" Jan 21 18:29:18 crc kubenswrapper[4799]: E0121 18:29:18.313940 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a276bf19250438ea51081235475f53f979c891ef5c3ba1df7583559fd54232d6\": container with ID starting with a276bf19250438ea51081235475f53f979c891ef5c3ba1df7583559fd54232d6 not found: ID does not exist" containerID="a276bf19250438ea51081235475f53f979c891ef5c3ba1df7583559fd54232d6" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:18.313967 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a276bf19250438ea51081235475f53f979c891ef5c3ba1df7583559fd54232d6"} err="failed to get container status \"a276bf19250438ea51081235475f53f979c891ef5c3ba1df7583559fd54232d6\": rpc error: code = NotFound desc = could not find 
container \"a276bf19250438ea51081235475f53f979c891ef5c3ba1df7583559fd54232d6\": container with ID starting with a276bf19250438ea51081235475f53f979c891ef5c3ba1df7583559fd54232d6 not found: ID does not exist" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:18.313984 4799 scope.go:117] "RemoveContainer" containerID="12609855a4107d951002a6887a61b4dc689b84b9ffdd6425aad6cf092d9d301c" Jan 21 18:29:18 crc kubenswrapper[4799]: E0121 18:29:18.314476 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12609855a4107d951002a6887a61b4dc689b84b9ffdd6425aad6cf092d9d301c\": container with ID starting with 12609855a4107d951002a6887a61b4dc689b84b9ffdd6425aad6cf092d9d301c not found: ID does not exist" containerID="12609855a4107d951002a6887a61b4dc689b84b9ffdd6425aad6cf092d9d301c" Jan 21 18:29:18 crc kubenswrapper[4799]: I0121 18:29:18.314504 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12609855a4107d951002a6887a61b4dc689b84b9ffdd6425aad6cf092d9d301c"} err="failed to get container status \"12609855a4107d951002a6887a61b4dc689b84b9ffdd6425aad6cf092d9d301c\": rpc error: code = NotFound desc = could not find container \"12609855a4107d951002a6887a61b4dc689b84b9ffdd6425aad6cf092d9d301c\": container with ID starting with 12609855a4107d951002a6887a61b4dc689b84b9ffdd6425aad6cf092d9d301c not found: ID does not exist" Jan 21 18:29:19 crc kubenswrapper[4799]: I0121 18:29:19.153608 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rccwj"] Jan 21 18:29:19 crc kubenswrapper[4799]: I0121 18:29:19.209415 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rccwj" podUID="b7a77c06-12b4-48e5-b1bf-9772a4b8bb88" containerName="registry-server" containerID="cri-o://706fd70e28c39a6ec282311344cbfa6f7f4d245178fe15d424a8fbf7c7daecc2" gracePeriod=2 Jan 21 18:29:19 crc kubenswrapper[4799]: I0121 18:29:19.740323 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:29:19 crc kubenswrapper[4799]: I0121 18:29:19.870089 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-catalog-content\") pod \"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88\" (UID: \"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88\") " Jan 21 18:29:19 crc kubenswrapper[4799]: I0121 18:29:19.870220 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-utilities\") pod \"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88\" (UID: \"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88\") " Jan 21 18:29:19 crc kubenswrapper[4799]: I0121 18:29:19.870462 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlx5r\" (UniqueName: \"kubernetes.io/projected/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-kube-api-access-dlx5r\") pod \"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88\" (UID: \"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88\") " Jan 21 18:29:19 crc kubenswrapper[4799]: I0121 18:29:19.871143 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-utilities" (OuterVolumeSpecName: "utilities") pod "b7a77c06-12b4-48e5-b1bf-9772a4b8bb88" (UID: "b7a77c06-12b4-48e5-b1bf-9772a4b8bb88"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:29:19 crc kubenswrapper[4799]: I0121 18:29:19.872038 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:29:19 crc kubenswrapper[4799]: I0121 18:29:19.876983 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-kube-api-access-dlx5r" (OuterVolumeSpecName: "kube-api-access-dlx5r") pod "b7a77c06-12b4-48e5-b1bf-9772a4b8bb88" (UID: "b7a77c06-12b4-48e5-b1bf-9772a4b8bb88"). InnerVolumeSpecName "kube-api-access-dlx5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:29:19 crc kubenswrapper[4799]: I0121 18:29:19.973916 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlx5r\" (UniqueName: \"kubernetes.io/projected/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-kube-api-access-dlx5r\") on node \"crc\" DevicePath \"\"" Jan 21 18:29:19 crc kubenswrapper[4799]: I0121 18:29:19.979875 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b7a77c06-12b4-48e5-b1bf-9772a4b8bb88" (UID: "b7a77c06-12b4-48e5-b1bf-9772a4b8bb88"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.076270 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.206048 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:29:20 crc kubenswrapper[4799]: E0121 18:29:20.206363 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.225457 4799 generic.go:334] "Generic (PLEG): container finished" podID="b7a77c06-12b4-48e5-b1bf-9772a4b8bb88" containerID="706fd70e28c39a6ec282311344cbfa6f7f4d245178fe15d424a8fbf7c7daecc2" exitCode=0 Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.225618 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rccwj" Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.247655 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1de55946-516e-4730-bc05-e43700652044" path="/var/lib/kubelet/pods/1de55946-516e-4730-bc05-e43700652044/volumes" Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.248948 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rccwj" event={"ID":"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88","Type":"ContainerDied","Data":"706fd70e28c39a6ec282311344cbfa6f7f4d245178fe15d424a8fbf7c7daecc2"} Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.248985 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rccwj" event={"ID":"b7a77c06-12b4-48e5-b1bf-9772a4b8bb88","Type":"ContainerDied","Data":"833facd3f51fb7d35d211dd7723e8e4b99cd67d38649d6b5e0d5308e040e736d"} Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.250505 4799 scope.go:117] "RemoveContainer" containerID="706fd70e28c39a6ec282311344cbfa6f7f4d245178fe15d424a8fbf7c7daecc2" Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.337439 4799 scope.go:117] "RemoveContainer" containerID="b3c67a2613e4d212ae0ec4281abf237e5fa445118049d02dc20310cc132c0003" Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.355209 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rccwj"] Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.364493 4799 scope.go:117] "RemoveContainer" containerID="38bdbcf54bf0515693247e8ec8f707a8e3015ae691b0ee3fddbff2a2d199c3dd" Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.376257 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rccwj"] Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.842687 4799 scope.go:117] "RemoveContainer" containerID="706fd70e28c39a6ec282311344cbfa6f7f4d245178fe15d424a8fbf7c7daecc2" Jan 21 18:29:20 crc kubenswrapper[4799]: E0121 18:29:20.843157 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc 
= could not find container \"706fd70e28c39a6ec282311344cbfa6f7f4d245178fe15d424a8fbf7c7daecc2\": container with ID starting with 706fd70e28c39a6ec282311344cbfa6f7f4d245178fe15d424a8fbf7c7daecc2 not found: ID does not exist" containerID="706fd70e28c39a6ec282311344cbfa6f7f4d245178fe15d424a8fbf7c7daecc2" Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.843188 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"706fd70e28c39a6ec282311344cbfa6f7f4d245178fe15d424a8fbf7c7daecc2"} err="failed to get container status \"706fd70e28c39a6ec282311344cbfa6f7f4d245178fe15d424a8fbf7c7daecc2\": rpc error: code = NotFound desc = could not find container \"706fd70e28c39a6ec282311344cbfa6f7f4d245178fe15d424a8fbf7c7daecc2\": container with ID starting with 706fd70e28c39a6ec282311344cbfa6f7f4d245178fe15d424a8fbf7c7daecc2 not found: ID does not exist" Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.843211 4799 scope.go:117] "RemoveContainer" containerID="b3c67a2613e4d212ae0ec4281abf237e5fa445118049d02dc20310cc132c0003" Jan 21 18:29:20 crc kubenswrapper[4799]: E0121 18:29:20.843468 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3c67a2613e4d212ae0ec4281abf237e5fa445118049d02dc20310cc132c0003\": container with ID starting with b3c67a2613e4d212ae0ec4281abf237e5fa445118049d02dc20310cc132c0003 not found: ID does not exist" containerID="b3c67a2613e4d212ae0ec4281abf237e5fa445118049d02dc20310cc132c0003" Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.843486 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3c67a2613e4d212ae0ec4281abf237e5fa445118049d02dc20310cc132c0003"} err="failed to get container status \"b3c67a2613e4d212ae0ec4281abf237e5fa445118049d02dc20310cc132c0003\": rpc error: code = NotFound desc = could not find container \"b3c67a2613e4d212ae0ec4281abf237e5fa445118049d02dc20310cc132c0003\": container with ID starting with b3c67a2613e4d212ae0ec4281abf237e5fa445118049d02dc20310cc132c0003 not found: ID does not exist" Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.843499 4799 scope.go:117] "RemoveContainer" containerID="38bdbcf54bf0515693247e8ec8f707a8e3015ae691b0ee3fddbff2a2d199c3dd" Jan 21 18:29:20 crc kubenswrapper[4799]: E0121 18:29:20.843885 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38bdbcf54bf0515693247e8ec8f707a8e3015ae691b0ee3fddbff2a2d199c3dd\": container with ID starting with 38bdbcf54bf0515693247e8ec8f707a8e3015ae691b0ee3fddbff2a2d199c3dd not found: ID does not exist" containerID="38bdbcf54bf0515693247e8ec8f707a8e3015ae691b0ee3fddbff2a2d199c3dd" Jan 21 18:29:20 crc kubenswrapper[4799]: I0121 18:29:20.843942 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38bdbcf54bf0515693247e8ec8f707a8e3015ae691b0ee3fddbff2a2d199c3dd"} err="failed to get container status \"38bdbcf54bf0515693247e8ec8f707a8e3015ae691b0ee3fddbff2a2d199c3dd\": rpc error: code = NotFound desc = could not find container \"38bdbcf54bf0515693247e8ec8f707a8e3015ae691b0ee3fddbff2a2d199c3dd\": container with ID starting with 38bdbcf54bf0515693247e8ec8f707a8e3015ae691b0ee3fddbff2a2d199c3dd not found: ID does not exist" Jan 21 18:29:22 crc kubenswrapper[4799]: I0121 18:29:22.217206 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7a77c06-12b4-48e5-b1bf-9772a4b8bb88" 
path="/var/lib/kubelet/pods/b7a77c06-12b4-48e5-b1bf-9772a4b8bb88/volumes" Jan 21 18:29:32 crc kubenswrapper[4799]: I0121 18:29:32.206052 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:29:32 crc kubenswrapper[4799]: E0121 18:29:32.207012 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:29:45 crc kubenswrapper[4799]: I0121 18:29:45.206407 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:29:45 crc kubenswrapper[4799]: E0121 18:29:45.207682 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:29:58 crc kubenswrapper[4799]: I0121 18:29:58.205240 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:29:58 crc kubenswrapper[4799]: E0121 18:29:58.205961 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.161195 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk"] Jan 21 18:30:00 crc kubenswrapper[4799]: E0121 18:30:00.162054 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1de55946-516e-4730-bc05-e43700652044" containerName="extract-utilities" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.162068 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="1de55946-516e-4730-bc05-e43700652044" containerName="extract-utilities" Jan 21 18:30:00 crc kubenswrapper[4799]: E0121 18:30:00.162077 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7a77c06-12b4-48e5-b1bf-9772a4b8bb88" containerName="extract-utilities" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.162084 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7a77c06-12b4-48e5-b1bf-9772a4b8bb88" containerName="extract-utilities" Jan 21 18:30:00 crc kubenswrapper[4799]: E0121 18:30:00.162097 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7a77c06-12b4-48e5-b1bf-9772a4b8bb88" containerName="registry-server" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.162103 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7a77c06-12b4-48e5-b1bf-9772a4b8bb88" containerName="registry-server" Jan 21 18:30:00 crc kubenswrapper[4799]: E0121 18:30:00.162186 4799 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1de55946-516e-4730-bc05-e43700652044" containerName="registry-server" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.162197 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="1de55946-516e-4730-bc05-e43700652044" containerName="registry-server" Jan 21 18:30:00 crc kubenswrapper[4799]: E0121 18:30:00.162222 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1de55946-516e-4730-bc05-e43700652044" containerName="extract-content" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.162231 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="1de55946-516e-4730-bc05-e43700652044" containerName="extract-content" Jan 21 18:30:00 crc kubenswrapper[4799]: E0121 18:30:00.162253 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7a77c06-12b4-48e5-b1bf-9772a4b8bb88" containerName="extract-content" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.162264 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7a77c06-12b4-48e5-b1bf-9772a4b8bb88" containerName="extract-content" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.162492 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7a77c06-12b4-48e5-b1bf-9772a4b8bb88" containerName="registry-server" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.162510 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="1de55946-516e-4730-bc05-e43700652044" containerName="registry-server" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.163332 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.166325 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.166368 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.174344 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk"] Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.213655 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bcebec36-4bde-4f86-9253-47e839e4011e-secret-volume\") pod \"collect-profiles-29483670-q9tnk\" (UID: \"bcebec36-4bde-4f86-9253-47e839e4011e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.213723 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bcebec36-4bde-4f86-9253-47e839e4011e-config-volume\") pod \"collect-profiles-29483670-q9tnk\" (UID: \"bcebec36-4bde-4f86-9253-47e839e4011e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.213844 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf8tq\" (UniqueName: \"kubernetes.io/projected/bcebec36-4bde-4f86-9253-47e839e4011e-kube-api-access-zf8tq\") pod \"collect-profiles-29483670-q9tnk\" (UID: 
\"bcebec36-4bde-4f86-9253-47e839e4011e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.316007 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bcebec36-4bde-4f86-9253-47e839e4011e-secret-volume\") pod \"collect-profiles-29483670-q9tnk\" (UID: \"bcebec36-4bde-4f86-9253-47e839e4011e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.316072 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bcebec36-4bde-4f86-9253-47e839e4011e-config-volume\") pod \"collect-profiles-29483670-q9tnk\" (UID: \"bcebec36-4bde-4f86-9253-47e839e4011e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.316105 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf8tq\" (UniqueName: \"kubernetes.io/projected/bcebec36-4bde-4f86-9253-47e839e4011e-kube-api-access-zf8tq\") pod \"collect-profiles-29483670-q9tnk\" (UID: \"bcebec36-4bde-4f86-9253-47e839e4011e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.317601 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bcebec36-4bde-4f86-9253-47e839e4011e-config-volume\") pod \"collect-profiles-29483670-q9tnk\" (UID: \"bcebec36-4bde-4f86-9253-47e839e4011e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.326295 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bcebec36-4bde-4f86-9253-47e839e4011e-secret-volume\") pod \"collect-profiles-29483670-q9tnk\" (UID: \"bcebec36-4bde-4f86-9253-47e839e4011e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.335754 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf8tq\" (UniqueName: \"kubernetes.io/projected/bcebec36-4bde-4f86-9253-47e839e4011e-kube-api-access-zf8tq\") pod \"collect-profiles-29483670-q9tnk\" (UID: \"bcebec36-4bde-4f86-9253-47e839e4011e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.496763 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk" Jan 21 18:30:00 crc kubenswrapper[4799]: I0121 18:30:00.985799 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk"] Jan 21 18:30:01 crc kubenswrapper[4799]: W0121 18:30:01.005208 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbcebec36_4bde_4f86_9253_47e839e4011e.slice/crio-976638382ab08f35539e207961ac70762b9807ccb3632e556e7d55158737a784 WatchSource:0}: Error finding container 976638382ab08f35539e207961ac70762b9807ccb3632e556e7d55158737a784: Status 404 returned error can't find the container with id 976638382ab08f35539e207961ac70762b9807ccb3632e556e7d55158737a784 Jan 21 18:30:01 crc kubenswrapper[4799]: I0121 18:30:01.674296 4799 generic.go:334] "Generic (PLEG): container finished" podID="bcebec36-4bde-4f86-9253-47e839e4011e" containerID="c7a7177076137e8f2ff56f34d3481b9ec538925b10d6c1a0fef1c77c97b53b0e" exitCode=0 Jan 21 18:30:01 crc kubenswrapper[4799]: I0121 18:30:01.674389 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk" event={"ID":"bcebec36-4bde-4f86-9253-47e839e4011e","Type":"ContainerDied","Data":"c7a7177076137e8f2ff56f34d3481b9ec538925b10d6c1a0fef1c77c97b53b0e"} Jan 21 18:30:01 crc kubenswrapper[4799]: I0121 18:30:01.674674 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk" event={"ID":"bcebec36-4bde-4f86-9253-47e839e4011e","Type":"ContainerStarted","Data":"976638382ab08f35539e207961ac70762b9807ccb3632e556e7d55158737a784"} Jan 21 18:30:03 crc kubenswrapper[4799]: I0121 18:30:03.065948 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk" Jan 21 18:30:03 crc kubenswrapper[4799]: I0121 18:30:03.179554 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bcebec36-4bde-4f86-9253-47e839e4011e-config-volume\") pod \"bcebec36-4bde-4f86-9253-47e839e4011e\" (UID: \"bcebec36-4bde-4f86-9253-47e839e4011e\") " Jan 21 18:30:03 crc kubenswrapper[4799]: I0121 18:30:03.179675 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bcebec36-4bde-4f86-9253-47e839e4011e-secret-volume\") pod \"bcebec36-4bde-4f86-9253-47e839e4011e\" (UID: \"bcebec36-4bde-4f86-9253-47e839e4011e\") " Jan 21 18:30:03 crc kubenswrapper[4799]: I0121 18:30:03.179741 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zf8tq\" (UniqueName: \"kubernetes.io/projected/bcebec36-4bde-4f86-9253-47e839e4011e-kube-api-access-zf8tq\") pod \"bcebec36-4bde-4f86-9253-47e839e4011e\" (UID: \"bcebec36-4bde-4f86-9253-47e839e4011e\") " Jan 21 18:30:03 crc kubenswrapper[4799]: I0121 18:30:03.180577 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcebec36-4bde-4f86-9253-47e839e4011e-config-volume" (OuterVolumeSpecName: "config-volume") pod "bcebec36-4bde-4f86-9253-47e839e4011e" (UID: "bcebec36-4bde-4f86-9253-47e839e4011e"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:30:03 crc kubenswrapper[4799]: I0121 18:30:03.188512 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcebec36-4bde-4f86-9253-47e839e4011e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "bcebec36-4bde-4f86-9253-47e839e4011e" (UID: "bcebec36-4bde-4f86-9253-47e839e4011e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:30:03 crc kubenswrapper[4799]: I0121 18:30:03.188539 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcebec36-4bde-4f86-9253-47e839e4011e-kube-api-access-zf8tq" (OuterVolumeSpecName: "kube-api-access-zf8tq") pod "bcebec36-4bde-4f86-9253-47e839e4011e" (UID: "bcebec36-4bde-4f86-9253-47e839e4011e"). InnerVolumeSpecName "kube-api-access-zf8tq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:30:03 crc kubenswrapper[4799]: I0121 18:30:03.282705 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bcebec36-4bde-4f86-9253-47e839e4011e-config-volume\") on node \"crc\" DevicePath \"\"" Jan 21 18:30:03 crc kubenswrapper[4799]: I0121 18:30:03.282747 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bcebec36-4bde-4f86-9253-47e839e4011e-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 21 18:30:03 crc kubenswrapper[4799]: I0121 18:30:03.282757 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf8tq\" (UniqueName: \"kubernetes.io/projected/bcebec36-4bde-4f86-9253-47e839e4011e-kube-api-access-zf8tq\") on node \"crc\" DevicePath \"\"" Jan 21 18:30:03 crc kubenswrapper[4799]: I0121 18:30:03.700073 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk" event={"ID":"bcebec36-4bde-4f86-9253-47e839e4011e","Type":"ContainerDied","Data":"976638382ab08f35539e207961ac70762b9807ccb3632e556e7d55158737a784"} Jan 21 18:30:03 crc kubenswrapper[4799]: I0121 18:30:03.700493 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="976638382ab08f35539e207961ac70762b9807ccb3632e556e7d55158737a784" Jan 21 18:30:03 crc kubenswrapper[4799]: I0121 18:30:03.700560 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk" Jan 21 18:30:04 crc kubenswrapper[4799]: I0121 18:30:04.155257 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8"] Jan 21 18:30:04 crc kubenswrapper[4799]: I0121 18:30:04.166031 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483625-xnkj8"] Jan 21 18:30:04 crc kubenswrapper[4799]: I0121 18:30:04.219078 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e314447a-b8d1-465f-99fb-4f684b091913" path="/var/lib/kubelet/pods/e314447a-b8d1-465f-99fb-4f684b091913/volumes" Jan 21 18:30:13 crc kubenswrapper[4799]: I0121 18:30:13.205711 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:30:13 crc kubenswrapper[4799]: E0121 18:30:13.206955 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:30:27 crc kubenswrapper[4799]: I0121 18:30:27.205628 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:30:27 crc kubenswrapper[4799]: I0121 18:30:27.975420 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"4ab52e9d0020ca9c8e7582dbafe42ab29d06c0927e751069069bb122154a3b5d"} Jan 21 18:30:40 crc kubenswrapper[4799]: I0121 18:30:40.436778 4799 scope.go:117] "RemoveContainer" containerID="c820d5fd6effdb8bd9d0232f67599b7193a0ed0ec2ddd9613513be1c3a8a8358" Jan 21 18:32:55 crc kubenswrapper[4799]: I0121 18:32:55.978887 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:32:55 crc kubenswrapper[4799]: I0121 18:32:55.979917 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:33:25 crc kubenswrapper[4799]: I0121 18:33:25.971408 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:33:25 crc kubenswrapper[4799]: I0121 18:33:25.972315 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Jan 21 18:33:55 crc kubenswrapper[4799]: I0121 18:33:55.971340 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:33:55 crc kubenswrapper[4799]: I0121 18:33:55.971878 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:33:55 crc kubenswrapper[4799]: I0121 18:33:55.971930 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 18:33:55 crc kubenswrapper[4799]: I0121 18:33:55.972898 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4ab52e9d0020ca9c8e7582dbafe42ab29d06c0927e751069069bb122154a3b5d"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:33:55 crc kubenswrapper[4799]: I0121 18:33:55.972966 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://4ab52e9d0020ca9c8e7582dbafe42ab29d06c0927e751069069bb122154a3b5d" gracePeriod=600 Jan 21 18:33:56 crc kubenswrapper[4799]: I0121 18:33:56.395051 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="4ab52e9d0020ca9c8e7582dbafe42ab29d06c0927e751069069bb122154a3b5d" exitCode=0 Jan 21 18:33:56 crc kubenswrapper[4799]: I0121 18:33:56.395275 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"4ab52e9d0020ca9c8e7582dbafe42ab29d06c0927e751069069bb122154a3b5d"} Jan 21 18:33:56 crc kubenswrapper[4799]: I0121 18:33:56.395443 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4"} Jan 21 18:33:56 crc kubenswrapper[4799]: I0121 18:33:56.395470 4799 scope.go:117] "RemoveContainer" containerID="9ee42cbde13e1d994a6634e53c0e95238621286644080b0c53e275c9ecc3804d" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.448444 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-k7nnk"] Jan 21 18:35:11 crc kubenswrapper[4799]: E0121 18:35:11.449645 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcebec36-4bde-4f86-9253-47e839e4011e" containerName="collect-profiles" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.449666 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcebec36-4bde-4f86-9253-47e839e4011e" containerName="collect-profiles" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 
18:35:11.449916 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcebec36-4bde-4f86-9253-47e839e4011e" containerName="collect-profiles" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.467808 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k7nnk" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.509328 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k7nnk"] Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.513799 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c748650e-351c-4d6c-b16c-6fa29a40a377-catalog-content\") pod \"certified-operators-k7nnk\" (UID: \"c748650e-351c-4d6c-b16c-6fa29a40a377\") " pod="openshift-marketplace/certified-operators-k7nnk" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.513999 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-697db\" (UniqueName: \"kubernetes.io/projected/c748650e-351c-4d6c-b16c-6fa29a40a377-kube-api-access-697db\") pod \"certified-operators-k7nnk\" (UID: \"c748650e-351c-4d6c-b16c-6fa29a40a377\") " pod="openshift-marketplace/certified-operators-k7nnk" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.514043 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c748650e-351c-4d6c-b16c-6fa29a40a377-utilities\") pod \"certified-operators-k7nnk\" (UID: \"c748650e-351c-4d6c-b16c-6fa29a40a377\") " pod="openshift-marketplace/certified-operators-k7nnk" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.616377 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c748650e-351c-4d6c-b16c-6fa29a40a377-catalog-content\") pod \"certified-operators-k7nnk\" (UID: \"c748650e-351c-4d6c-b16c-6fa29a40a377\") " pod="openshift-marketplace/certified-operators-k7nnk" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.616482 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-697db\" (UniqueName: \"kubernetes.io/projected/c748650e-351c-4d6c-b16c-6fa29a40a377-kube-api-access-697db\") pod \"certified-operators-k7nnk\" (UID: \"c748650e-351c-4d6c-b16c-6fa29a40a377\") " pod="openshift-marketplace/certified-operators-k7nnk" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.616510 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c748650e-351c-4d6c-b16c-6fa29a40a377-utilities\") pod \"certified-operators-k7nnk\" (UID: \"c748650e-351c-4d6c-b16c-6fa29a40a377\") " pod="openshift-marketplace/certified-operators-k7nnk" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.617070 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c748650e-351c-4d6c-b16c-6fa29a40a377-utilities\") pod \"certified-operators-k7nnk\" (UID: \"c748650e-351c-4d6c-b16c-6fa29a40a377\") " pod="openshift-marketplace/certified-operators-k7nnk" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.617090 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/c748650e-351c-4d6c-b16c-6fa29a40a377-catalog-content\") pod \"certified-operators-k7nnk\" (UID: \"c748650e-351c-4d6c-b16c-6fa29a40a377\") " pod="openshift-marketplace/certified-operators-k7nnk" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.643840 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-czjf6"] Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.646582 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.647054 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-697db\" (UniqueName: \"kubernetes.io/projected/c748650e-351c-4d6c-b16c-6fa29a40a377-kube-api-access-697db\") pod \"certified-operators-k7nnk\" (UID: \"c748650e-351c-4d6c-b16c-6fa29a40a377\") " pod="openshift-marketplace/certified-operators-k7nnk" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.674530 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-czjf6"] Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.718819 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a15ef8cc-16a5-40ff-a0bc-907be9e45699-catalog-content\") pod \"community-operators-czjf6\" (UID: \"a15ef8cc-16a5-40ff-a0bc-907be9e45699\") " pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.718942 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcrq9\" (UniqueName: \"kubernetes.io/projected/a15ef8cc-16a5-40ff-a0bc-907be9e45699-kube-api-access-tcrq9\") pod \"community-operators-czjf6\" (UID: \"a15ef8cc-16a5-40ff-a0bc-907be9e45699\") " pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.718975 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a15ef8cc-16a5-40ff-a0bc-907be9e45699-utilities\") pod \"community-operators-czjf6\" (UID: \"a15ef8cc-16a5-40ff-a0bc-907be9e45699\") " pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.817244 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-k7nnk" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.820625 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a15ef8cc-16a5-40ff-a0bc-907be9e45699-catalog-content\") pod \"community-operators-czjf6\" (UID: \"a15ef8cc-16a5-40ff-a0bc-907be9e45699\") " pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.821422 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcrq9\" (UniqueName: \"kubernetes.io/projected/a15ef8cc-16a5-40ff-a0bc-907be9e45699-kube-api-access-tcrq9\") pod \"community-operators-czjf6\" (UID: \"a15ef8cc-16a5-40ff-a0bc-907be9e45699\") " pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.821835 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a15ef8cc-16a5-40ff-a0bc-907be9e45699-utilities\") pod \"community-operators-czjf6\" (UID: \"a15ef8cc-16a5-40ff-a0bc-907be9e45699\") " pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.821295 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a15ef8cc-16a5-40ff-a0bc-907be9e45699-catalog-content\") pod \"community-operators-czjf6\" (UID: \"a15ef8cc-16a5-40ff-a0bc-907be9e45699\") " pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.822157 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a15ef8cc-16a5-40ff-a0bc-907be9e45699-utilities\") pod \"community-operators-czjf6\" (UID: \"a15ef8cc-16a5-40ff-a0bc-907be9e45699\") " pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:11 crc kubenswrapper[4799]: I0121 18:35:11.863426 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcrq9\" (UniqueName: \"kubernetes.io/projected/a15ef8cc-16a5-40ff-a0bc-907be9e45699-kube-api-access-tcrq9\") pod \"community-operators-czjf6\" (UID: \"a15ef8cc-16a5-40ff-a0bc-907be9e45699\") " pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:12 crc kubenswrapper[4799]: I0121 18:35:12.034916 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:12 crc kubenswrapper[4799]: I0121 18:35:12.511110 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k7nnk"] Jan 21 18:35:13 crc kubenswrapper[4799]: I0121 18:35:13.220505 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-czjf6"] Jan 21 18:35:13 crc kubenswrapper[4799]: I0121 18:35:13.267357 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7nnk" event={"ID":"c748650e-351c-4d6c-b16c-6fa29a40a377","Type":"ContainerStarted","Data":"31bbffaf4f8373f975fd80725197630f67e8d39b93623543b33c0bb84420d38c"} Jan 21 18:35:13 crc kubenswrapper[4799]: I0121 18:35:13.268082 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7nnk" event={"ID":"c748650e-351c-4d6c-b16c-6fa29a40a377","Type":"ContainerStarted","Data":"8e6d7a4942e24a09e0dc1a670c7fc64a9d1507ad15a6f5f8e576870080991f0b"} Jan 21 18:35:13 crc kubenswrapper[4799]: I0121 18:35:13.272002 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-czjf6" event={"ID":"a15ef8cc-16a5-40ff-a0bc-907be9e45699","Type":"ContainerStarted","Data":"f51d062264dd658065d6fb0a57f367f6cbdfc4f3c351a3aa87fe76bacba8515a"} Jan 21 18:35:14 crc kubenswrapper[4799]: I0121 18:35:14.284267 4799 generic.go:334] "Generic (PLEG): container finished" podID="c748650e-351c-4d6c-b16c-6fa29a40a377" containerID="31bbffaf4f8373f975fd80725197630f67e8d39b93623543b33c0bb84420d38c" exitCode=0 Jan 21 18:35:14 crc kubenswrapper[4799]: I0121 18:35:14.284599 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7nnk" event={"ID":"c748650e-351c-4d6c-b16c-6fa29a40a377","Type":"ContainerDied","Data":"31bbffaf4f8373f975fd80725197630f67e8d39b93623543b33c0bb84420d38c"} Jan 21 18:35:14 crc kubenswrapper[4799]: I0121 18:35:14.286395 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:35:14 crc kubenswrapper[4799]: I0121 18:35:14.287404 4799 generic.go:334] "Generic (PLEG): container finished" podID="a15ef8cc-16a5-40ff-a0bc-907be9e45699" containerID="7c3fb1c8c2b148c1b2bcb489aa0a7d40db071b5d34c1cf4935ea6b59d34fed2f" exitCode=0 Jan 21 18:35:14 crc kubenswrapper[4799]: I0121 18:35:14.287449 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-czjf6" event={"ID":"a15ef8cc-16a5-40ff-a0bc-907be9e45699","Type":"ContainerDied","Data":"7c3fb1c8c2b148c1b2bcb489aa0a7d40db071b5d34c1cf4935ea6b59d34fed2f"} Jan 21 18:35:15 crc kubenswrapper[4799]: I0121 18:35:15.304957 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-czjf6" event={"ID":"a15ef8cc-16a5-40ff-a0bc-907be9e45699","Type":"ContainerStarted","Data":"6270fc67853b15c3ff3b20f24c8fce453575409dbe83f5e573a7843fe1553355"} Jan 21 18:35:17 crc kubenswrapper[4799]: I0121 18:35:17.330363 4799 generic.go:334] "Generic (PLEG): container finished" podID="a15ef8cc-16a5-40ff-a0bc-907be9e45699" containerID="6270fc67853b15c3ff3b20f24c8fce453575409dbe83f5e573a7843fe1553355" exitCode=0 Jan 21 18:35:17 crc kubenswrapper[4799]: I0121 18:35:17.330484 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-czjf6" 
event={"ID":"a15ef8cc-16a5-40ff-a0bc-907be9e45699","Type":"ContainerDied","Data":"6270fc67853b15c3ff3b20f24c8fce453575409dbe83f5e573a7843fe1553355"} Jan 21 18:35:20 crc kubenswrapper[4799]: I0121 18:35:20.370717 4799 generic.go:334] "Generic (PLEG): container finished" podID="c748650e-351c-4d6c-b16c-6fa29a40a377" containerID="f2565cf586bf049a6605610c718b8dcbffcbe17dcda4a1d00fde81e298e3443b" exitCode=0 Jan 21 18:35:20 crc kubenswrapper[4799]: I0121 18:35:20.370801 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7nnk" event={"ID":"c748650e-351c-4d6c-b16c-6fa29a40a377","Type":"ContainerDied","Data":"f2565cf586bf049a6605610c718b8dcbffcbe17dcda4a1d00fde81e298e3443b"} Jan 21 18:35:21 crc kubenswrapper[4799]: I0121 18:35:21.383773 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7nnk" event={"ID":"c748650e-351c-4d6c-b16c-6fa29a40a377","Type":"ContainerStarted","Data":"555d8039e2c6b81a476c185f145b50dea0cc3b8f0613308e238c3c56f727d96f"} Jan 21 18:35:21 crc kubenswrapper[4799]: I0121 18:35:21.386660 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-czjf6" event={"ID":"a15ef8cc-16a5-40ff-a0bc-907be9e45699","Type":"ContainerStarted","Data":"a802be8ced9ef403efc7131d87262a34bc6a5b40f374ecf9bf7641c553d7b6de"} Jan 21 18:35:21 crc kubenswrapper[4799]: I0121 18:35:21.413339 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-k7nnk" podStartSLOduration=3.886282314 podStartE2EDuration="10.413319234s" podCreationTimestamp="2026-01-21 18:35:11 +0000 UTC" firstStartedPulling="2026-01-21 18:35:14.286108049 +0000 UTC m=+3740.912398072" lastFinishedPulling="2026-01-21 18:35:20.813144969 +0000 UTC m=+3747.439434992" observedRunningTime="2026-01-21 18:35:21.404544054 +0000 UTC m=+3748.030834077" watchObservedRunningTime="2026-01-21 18:35:21.413319234 +0000 UTC m=+3748.039609257" Jan 21 18:35:21 crc kubenswrapper[4799]: I0121 18:35:21.428329 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-czjf6" podStartSLOduration=4.463825474 podStartE2EDuration="10.428309171s" podCreationTimestamp="2026-01-21 18:35:11 +0000 UTC" firstStartedPulling="2026-01-21 18:35:14.289168046 +0000 UTC m=+3740.915458069" lastFinishedPulling="2026-01-21 18:35:20.253651743 +0000 UTC m=+3746.879941766" observedRunningTime="2026-01-21 18:35:21.419937593 +0000 UTC m=+3748.046227636" watchObservedRunningTime="2026-01-21 18:35:21.428309171 +0000 UTC m=+3748.054599204" Jan 21 18:35:21 crc kubenswrapper[4799]: I0121 18:35:21.817863 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-k7nnk" Jan 21 18:35:21 crc kubenswrapper[4799]: I0121 18:35:21.817929 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-k7nnk" Jan 21 18:35:22 crc kubenswrapper[4799]: I0121 18:35:22.035317 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:22 crc kubenswrapper[4799]: I0121 18:35:22.035377 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:22 crc kubenswrapper[4799]: I0121 18:35:22.865068 4799 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-marketplace/certified-operators-k7nnk" podUID="c748650e-351c-4d6c-b16c-6fa29a40a377" containerName="registry-server" probeResult="failure" output=< Jan 21 18:35:22 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Jan 21 18:35:22 crc kubenswrapper[4799]: > Jan 21 18:35:23 crc kubenswrapper[4799]: I0121 18:35:23.086907 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-czjf6" podUID="a15ef8cc-16a5-40ff-a0bc-907be9e45699" containerName="registry-server" probeResult="failure" output=< Jan 21 18:35:23 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Jan 21 18:35:23 crc kubenswrapper[4799]: > Jan 21 18:35:31 crc kubenswrapper[4799]: I0121 18:35:31.865224 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-k7nnk" Jan 21 18:35:31 crc kubenswrapper[4799]: I0121 18:35:31.932996 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-k7nnk" Jan 21 18:35:32 crc kubenswrapper[4799]: I0121 18:35:32.008806 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k7nnk"] Jan 21 18:35:32 crc kubenswrapper[4799]: I0121 18:35:32.085635 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:32 crc kubenswrapper[4799]: I0121 18:35:32.111213 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rf9sq"] Jan 21 18:35:32 crc kubenswrapper[4799]: I0121 18:35:32.111544 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rf9sq" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" containerName="registry-server" containerID="cri-o://4e144aeae3c67f438e600863035b360fc5e82ca00c7098b1101545a0d22a6f01" gracePeriod=2 Jan 21 18:35:32 crc kubenswrapper[4799]: I0121 18:35:32.155608 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:33 crc kubenswrapper[4799]: I0121 18:35:33.535089 4799 generic.go:334] "Generic (PLEG): container finished" podID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" containerID="4e144aeae3c67f438e600863035b360fc5e82ca00c7098b1101545a0d22a6f01" exitCode=0 Jan 21 18:35:33 crc kubenswrapper[4799]: I0121 18:35:33.535329 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rf9sq" event={"ID":"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac","Type":"ContainerDied","Data":"4e144aeae3c67f438e600863035b360fc5e82ca00c7098b1101545a0d22a6f01"} Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:33.999662 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.165473 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-utilities\") pod \"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac\" (UID: \"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac\") " Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.165617 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-catalog-content\") pod \"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac\" (UID: \"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac\") " Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.165798 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6244\" (UniqueName: \"kubernetes.io/projected/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-kube-api-access-m6244\") pod \"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac\" (UID: \"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac\") " Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.181254 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-kube-api-access-m6244" (OuterVolumeSpecName: "kube-api-access-m6244") pod "27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" (UID: "27f13fdd-2f93-4f6a-9dfd-953489f9a5ac"). InnerVolumeSpecName "kube-api-access-m6244". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.200406 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-utilities" (OuterVolumeSpecName: "utilities") pod "27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" (UID: "27f13fdd-2f93-4f6a-9dfd-953489f9a5ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.269960 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.270559 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6244\" (UniqueName: \"kubernetes.io/projected/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-kube-api-access-m6244\") on node \"crc\" DevicePath \"\"" Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.302511 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-czjf6"] Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.302796 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-czjf6" podUID="a15ef8cc-16a5-40ff-a0bc-907be9e45699" containerName="registry-server" containerID="cri-o://a802be8ced9ef403efc7131d87262a34bc6a5b40f374ecf9bf7641c553d7b6de" gracePeriod=2 Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.385282 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" (UID: "27f13fdd-2f93-4f6a-9dfd-953489f9a5ac"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.474930 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.546173 4799 generic.go:334] "Generic (PLEG): container finished" podID="a15ef8cc-16a5-40ff-a0bc-907be9e45699" containerID="a802be8ced9ef403efc7131d87262a34bc6a5b40f374ecf9bf7641c553d7b6de" exitCode=0 Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.546251 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-czjf6" event={"ID":"a15ef8cc-16a5-40ff-a0bc-907be9e45699","Type":"ContainerDied","Data":"a802be8ced9ef403efc7131d87262a34bc6a5b40f374ecf9bf7641c553d7b6de"} Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.548682 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rf9sq" event={"ID":"27f13fdd-2f93-4f6a-9dfd-953489f9a5ac","Type":"ContainerDied","Data":"652d0b009b7dededdcdc1d9b634ada2fd6927e0fcb586fa3c49f3cc165aed858"} Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.548738 4799 scope.go:117] "RemoveContainer" containerID="4e144aeae3c67f438e600863035b360fc5e82ca00c7098b1101545a0d22a6f01" Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.548805 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rf9sq" Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.575463 4799 scope.go:117] "RemoveContainer" containerID="225709d76422d5e6eb054ef11d56dbb4703f27b61c27c82f2f7e48b880b81c47" Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.595977 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rf9sq"] Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.616950 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rf9sq"] Jan 21 18:35:34 crc kubenswrapper[4799]: I0121 18:35:34.623637 4799 scope.go:117] "RemoveContainer" containerID="a286b028acb7f6693ff516df3b6636eb8f785888c026d36173802d5039fda2e9" Jan 21 18:35:35 crc kubenswrapper[4799]: I0121 18:35:35.566455 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-czjf6" event={"ID":"a15ef8cc-16a5-40ff-a0bc-907be9e45699","Type":"ContainerDied","Data":"f51d062264dd658065d6fb0a57f367f6cbdfc4f3c351a3aa87fe76bacba8515a"} Jan 21 18:35:35 crc kubenswrapper[4799]: I0121 18:35:35.566713 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f51d062264dd658065d6fb0a57f367f6cbdfc4f3c351a3aa87fe76bacba8515a" Jan 21 18:35:35 crc kubenswrapper[4799]: I0121 18:35:35.631085 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:35 crc kubenswrapper[4799]: I0121 18:35:35.803608 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a15ef8cc-16a5-40ff-a0bc-907be9e45699-catalog-content\") pod \"a15ef8cc-16a5-40ff-a0bc-907be9e45699\" (UID: \"a15ef8cc-16a5-40ff-a0bc-907be9e45699\") " Jan 21 18:35:35 crc kubenswrapper[4799]: I0121 18:35:35.803653 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a15ef8cc-16a5-40ff-a0bc-907be9e45699-utilities\") pod \"a15ef8cc-16a5-40ff-a0bc-907be9e45699\" (UID: \"a15ef8cc-16a5-40ff-a0bc-907be9e45699\") " Jan 21 18:35:35 crc kubenswrapper[4799]: I0121 18:35:35.803870 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcrq9\" (UniqueName: \"kubernetes.io/projected/a15ef8cc-16a5-40ff-a0bc-907be9e45699-kube-api-access-tcrq9\") pod \"a15ef8cc-16a5-40ff-a0bc-907be9e45699\" (UID: \"a15ef8cc-16a5-40ff-a0bc-907be9e45699\") " Jan 21 18:35:35 crc kubenswrapper[4799]: I0121 18:35:35.804553 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a15ef8cc-16a5-40ff-a0bc-907be9e45699-utilities" (OuterVolumeSpecName: "utilities") pod "a15ef8cc-16a5-40ff-a0bc-907be9e45699" (UID: "a15ef8cc-16a5-40ff-a0bc-907be9e45699"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:35:35 crc kubenswrapper[4799]: I0121 18:35:35.810684 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a15ef8cc-16a5-40ff-a0bc-907be9e45699-kube-api-access-tcrq9" (OuterVolumeSpecName: "kube-api-access-tcrq9") pod "a15ef8cc-16a5-40ff-a0bc-907be9e45699" (UID: "a15ef8cc-16a5-40ff-a0bc-907be9e45699"). InnerVolumeSpecName "kube-api-access-tcrq9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:35:35 crc kubenswrapper[4799]: I0121 18:35:35.869926 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a15ef8cc-16a5-40ff-a0bc-907be9e45699-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a15ef8cc-16a5-40ff-a0bc-907be9e45699" (UID: "a15ef8cc-16a5-40ff-a0bc-907be9e45699"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:35:35 crc kubenswrapper[4799]: I0121 18:35:35.907276 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a15ef8cc-16a5-40ff-a0bc-907be9e45699-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:35:35 crc kubenswrapper[4799]: I0121 18:35:35.907352 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a15ef8cc-16a5-40ff-a0bc-907be9e45699-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:35:35 crc kubenswrapper[4799]: I0121 18:35:35.907369 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcrq9\" (UniqueName: \"kubernetes.io/projected/a15ef8cc-16a5-40ff-a0bc-907be9e45699-kube-api-access-tcrq9\") on node \"crc\" DevicePath \"\"" Jan 21 18:35:36 crc kubenswrapper[4799]: I0121 18:35:36.224026 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" path="/var/lib/kubelet/pods/27f13fdd-2f93-4f6a-9dfd-953489f9a5ac/volumes" Jan 21 18:35:36 crc kubenswrapper[4799]: I0121 18:35:36.578493 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-czjf6" Jan 21 18:35:36 crc kubenswrapper[4799]: I0121 18:35:36.609984 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-czjf6"] Jan 21 18:35:36 crc kubenswrapper[4799]: I0121 18:35:36.623907 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-czjf6"] Jan 21 18:35:38 crc kubenswrapper[4799]: I0121 18:35:38.229685 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a15ef8cc-16a5-40ff-a0bc-907be9e45699" path="/var/lib/kubelet/pods/a15ef8cc-16a5-40ff-a0bc-907be9e45699/volumes" Jan 21 18:36:25 crc kubenswrapper[4799]: I0121 18:36:25.971318 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:36:25 crc kubenswrapper[4799]: I0121 18:36:25.971975 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:36:55 crc kubenswrapper[4799]: I0121 18:36:55.970898 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:36:55 crc kubenswrapper[4799]: I0121 18:36:55.971365 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:37:25 crc kubenswrapper[4799]: I0121 18:37:25.970552 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:37:25 crc kubenswrapper[4799]: I0121 18:37:25.971091 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:37:25 crc kubenswrapper[4799]: I0121 18:37:25.971160 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 18:37:25 crc kubenswrapper[4799]: I0121 18:37:25.972209 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:37:25 crc kubenswrapper[4799]: I0121 18:37:25.972273 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" gracePeriod=600 Jan 21 18:37:26 crc kubenswrapper[4799]: E0121 18:37:26.100634 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:37:26 crc kubenswrapper[4799]: I0121 18:37:26.824535 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" exitCode=0 Jan 21 18:37:26 crc kubenswrapper[4799]: I0121 18:37:26.824597 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4"} Jan 21 18:37:26 crc kubenswrapper[4799]: I0121 18:37:26.824643 4799 scope.go:117] "RemoveContainer" containerID="4ab52e9d0020ca9c8e7582dbafe42ab29d06c0927e751069069bb122154a3b5d" Jan 21 18:37:26 crc kubenswrapper[4799]: I0121 18:37:26.825719 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:37:26 crc kubenswrapper[4799]: E0121 18:37:26.826247 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" 
podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:37:40 crc kubenswrapper[4799]: I0121 18:37:40.206461 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:37:40 crc kubenswrapper[4799]: E0121 18:37:40.207515 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:37:54 crc kubenswrapper[4799]: I0121 18:37:54.213735 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:37:54 crc kubenswrapper[4799]: E0121 18:37:54.215329 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:38:09 crc kubenswrapper[4799]: I0121 18:38:09.206800 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:38:09 crc kubenswrapper[4799]: E0121 18:38:09.207855 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:38:23 crc kubenswrapper[4799]: I0121 18:38:23.205771 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:38:23 crc kubenswrapper[4799]: E0121 18:38:23.206759 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:38:34 crc kubenswrapper[4799]: I0121 18:38:34.211392 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:38:34 crc kubenswrapper[4799]: E0121 18:38:34.212210 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:38:45 crc kubenswrapper[4799]: I0121 18:38:45.205440 4799 scope.go:117] "RemoveContainer" 
containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:38:45 crc kubenswrapper[4799]: E0121 18:38:45.206467 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:39:00 crc kubenswrapper[4799]: I0121 18:39:00.204767 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:39:00 crc kubenswrapper[4799]: E0121 18:39:00.206761 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:39:12 crc kubenswrapper[4799]: I0121 18:39:12.205621 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:39:12 crc kubenswrapper[4799]: E0121 18:39:12.206563 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:39:26 crc kubenswrapper[4799]: I0121 18:39:26.205563 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:39:26 crc kubenswrapper[4799]: E0121 18:39:26.206411 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:39:39 crc kubenswrapper[4799]: I0121 18:39:39.206246 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:39:39 crc kubenswrapper[4799]: E0121 18:39:39.206965 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:39:52 crc kubenswrapper[4799]: I0121 18:39:52.206003 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:39:52 crc kubenswrapper[4799]: E0121 18:39:52.207220 4799 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:40:01 crc kubenswrapper[4799]: I0121 18:40:01.954482 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-d8hm2"] Jan 21 18:40:01 crc kubenswrapper[4799]: E0121 18:40:01.955910 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" containerName="extract-content" Jan 21 18:40:01 crc kubenswrapper[4799]: I0121 18:40:01.955933 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" containerName="extract-content" Jan 21 18:40:01 crc kubenswrapper[4799]: E0121 18:40:01.955960 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a15ef8cc-16a5-40ff-a0bc-907be9e45699" containerName="extract-content" Jan 21 18:40:01 crc kubenswrapper[4799]: I0121 18:40:01.955972 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a15ef8cc-16a5-40ff-a0bc-907be9e45699" containerName="extract-content" Jan 21 18:40:01 crc kubenswrapper[4799]: E0121 18:40:01.955988 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" containerName="extract-utilities" Jan 21 18:40:01 crc kubenswrapper[4799]: I0121 18:40:01.956002 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" containerName="extract-utilities" Jan 21 18:40:01 crc kubenswrapper[4799]: E0121 18:40:01.956027 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a15ef8cc-16a5-40ff-a0bc-907be9e45699" containerName="extract-utilities" Jan 21 18:40:01 crc kubenswrapper[4799]: I0121 18:40:01.956039 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a15ef8cc-16a5-40ff-a0bc-907be9e45699" containerName="extract-utilities" Jan 21 18:40:01 crc kubenswrapper[4799]: E0121 18:40:01.956095 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" containerName="registry-server" Jan 21 18:40:01 crc kubenswrapper[4799]: I0121 18:40:01.956106 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" containerName="registry-server" Jan 21 18:40:01 crc kubenswrapper[4799]: E0121 18:40:01.956246 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a15ef8cc-16a5-40ff-a0bc-907be9e45699" containerName="registry-server" Jan 21 18:40:01 crc kubenswrapper[4799]: I0121 18:40:01.956260 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a15ef8cc-16a5-40ff-a0bc-907be9e45699" containerName="registry-server" Jan 21 18:40:01 crc kubenswrapper[4799]: I0121 18:40:01.956641 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="27f13fdd-2f93-4f6a-9dfd-953489f9a5ac" containerName="registry-server" Jan 21 18:40:01 crc kubenswrapper[4799]: I0121 18:40:01.956671 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a15ef8cc-16a5-40ff-a0bc-907be9e45699" containerName="registry-server" Jan 21 18:40:01 crc kubenswrapper[4799]: I0121 18:40:01.959498 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:01 crc kubenswrapper[4799]: I0121 18:40:01.973632 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d8hm2"] Jan 21 18:40:02 crc kubenswrapper[4799]: I0121 18:40:02.104802 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/725867c6-9e96-4acd-87d3-9a82725c6fb2-utilities\") pod \"redhat-marketplace-d8hm2\" (UID: \"725867c6-9e96-4acd-87d3-9a82725c6fb2\") " pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:02 crc kubenswrapper[4799]: I0121 18:40:02.104988 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nssfj\" (UniqueName: \"kubernetes.io/projected/725867c6-9e96-4acd-87d3-9a82725c6fb2-kube-api-access-nssfj\") pod \"redhat-marketplace-d8hm2\" (UID: \"725867c6-9e96-4acd-87d3-9a82725c6fb2\") " pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:02 crc kubenswrapper[4799]: I0121 18:40:02.105014 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/725867c6-9e96-4acd-87d3-9a82725c6fb2-catalog-content\") pod \"redhat-marketplace-d8hm2\" (UID: \"725867c6-9e96-4acd-87d3-9a82725c6fb2\") " pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:02 crc kubenswrapper[4799]: I0121 18:40:02.206977 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/725867c6-9e96-4acd-87d3-9a82725c6fb2-utilities\") pod \"redhat-marketplace-d8hm2\" (UID: \"725867c6-9e96-4acd-87d3-9a82725c6fb2\") " pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:02 crc kubenswrapper[4799]: I0121 18:40:02.207187 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nssfj\" (UniqueName: \"kubernetes.io/projected/725867c6-9e96-4acd-87d3-9a82725c6fb2-kube-api-access-nssfj\") pod \"redhat-marketplace-d8hm2\" (UID: \"725867c6-9e96-4acd-87d3-9a82725c6fb2\") " pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:02 crc kubenswrapper[4799]: I0121 18:40:02.207230 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/725867c6-9e96-4acd-87d3-9a82725c6fb2-catalog-content\") pod \"redhat-marketplace-d8hm2\" (UID: \"725867c6-9e96-4acd-87d3-9a82725c6fb2\") " pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:02 crc kubenswrapper[4799]: I0121 18:40:02.207590 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/725867c6-9e96-4acd-87d3-9a82725c6fb2-utilities\") pod \"redhat-marketplace-d8hm2\" (UID: \"725867c6-9e96-4acd-87d3-9a82725c6fb2\") " pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:02 crc kubenswrapper[4799]: I0121 18:40:02.207633 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/725867c6-9e96-4acd-87d3-9a82725c6fb2-catalog-content\") pod \"redhat-marketplace-d8hm2\" (UID: \"725867c6-9e96-4acd-87d3-9a82725c6fb2\") " pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:02 crc kubenswrapper[4799]: I0121 18:40:02.228420 4799 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-nssfj\" (UniqueName: \"kubernetes.io/projected/725867c6-9e96-4acd-87d3-9a82725c6fb2-kube-api-access-nssfj\") pod \"redhat-marketplace-d8hm2\" (UID: \"725867c6-9e96-4acd-87d3-9a82725c6fb2\") " pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:02 crc kubenswrapper[4799]: I0121 18:40:02.290637 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:02 crc kubenswrapper[4799]: I0121 18:40:02.799296 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d8hm2"] Jan 21 18:40:03 crc kubenswrapper[4799]: I0121 18:40:03.772960 4799 generic.go:334] "Generic (PLEG): container finished" podID="725867c6-9e96-4acd-87d3-9a82725c6fb2" containerID="baeffbc1da7210e62e3d4fba64a9a06cda458ba5e971272abaea70c76d1b128a" exitCode=0 Jan 21 18:40:03 crc kubenswrapper[4799]: I0121 18:40:03.773032 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d8hm2" event={"ID":"725867c6-9e96-4acd-87d3-9a82725c6fb2","Type":"ContainerDied","Data":"baeffbc1da7210e62e3d4fba64a9a06cda458ba5e971272abaea70c76d1b128a"} Jan 21 18:40:03 crc kubenswrapper[4799]: I0121 18:40:03.773365 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d8hm2" event={"ID":"725867c6-9e96-4acd-87d3-9a82725c6fb2","Type":"ContainerStarted","Data":"d44214af2c4671da8b30beabbb6042973532ee71ad0bbed5cf617b316302f324"} Jan 21 18:40:04 crc kubenswrapper[4799]: I0121 18:40:04.786797 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d8hm2" event={"ID":"725867c6-9e96-4acd-87d3-9a82725c6fb2","Type":"ContainerStarted","Data":"e02972aaf678fabb3589a5091d5938e5b2a20a1779fee8275e42b2e65dac71f9"} Jan 21 18:40:05 crc kubenswrapper[4799]: I0121 18:40:05.206513 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:40:05 crc kubenswrapper[4799]: E0121 18:40:05.207010 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:40:05 crc kubenswrapper[4799]: I0121 18:40:05.797007 4799 generic.go:334] "Generic (PLEG): container finished" podID="725867c6-9e96-4acd-87d3-9a82725c6fb2" containerID="e02972aaf678fabb3589a5091d5938e5b2a20a1779fee8275e42b2e65dac71f9" exitCode=0 Jan 21 18:40:05 crc kubenswrapper[4799]: I0121 18:40:05.797622 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d8hm2" event={"ID":"725867c6-9e96-4acd-87d3-9a82725c6fb2","Type":"ContainerDied","Data":"e02972aaf678fabb3589a5091d5938e5b2a20a1779fee8275e42b2e65dac71f9"} Jan 21 18:40:06 crc kubenswrapper[4799]: I0121 18:40:06.823782 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d8hm2" event={"ID":"725867c6-9e96-4acd-87d3-9a82725c6fb2","Type":"ContainerStarted","Data":"afbf5236be6c8a458dbdcd348a4fc7f9ab162ed5d23af38b5f71ac6d77bf4a07"} Jan 21 18:40:06 crc kubenswrapper[4799]: I0121 18:40:06.847868 4799 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-marketplace/redhat-marketplace-d8hm2" podStartSLOduration=3.433101101 podStartE2EDuration="5.84784841s" podCreationTimestamp="2026-01-21 18:40:01 +0000 UTC" firstStartedPulling="2026-01-21 18:40:03.775810166 +0000 UTC m=+4030.402100229" lastFinishedPulling="2026-01-21 18:40:06.190557515 +0000 UTC m=+4032.816847538" observedRunningTime="2026-01-21 18:40:06.844462224 +0000 UTC m=+4033.470752257" watchObservedRunningTime="2026-01-21 18:40:06.84784841 +0000 UTC m=+4033.474138433" Jan 21 18:40:12 crc kubenswrapper[4799]: I0121 18:40:12.291681 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:12 crc kubenswrapper[4799]: I0121 18:40:12.292339 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:12 crc kubenswrapper[4799]: I0121 18:40:12.377903 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:12 crc kubenswrapper[4799]: I0121 18:40:12.937824 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:13 crc kubenswrapper[4799]: I0121 18:40:13.009279 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d8hm2"] Jan 21 18:40:14 crc kubenswrapper[4799]: I0121 18:40:14.910477 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-d8hm2" podUID="725867c6-9e96-4acd-87d3-9a82725c6fb2" containerName="registry-server" containerID="cri-o://afbf5236be6c8a458dbdcd348a4fc7f9ab162ed5d23af38b5f71ac6d77bf4a07" gracePeriod=2 Jan 21 18:40:15 crc kubenswrapper[4799]: I0121 18:40:15.922738 4799 generic.go:334] "Generic (PLEG): container finished" podID="725867c6-9e96-4acd-87d3-9a82725c6fb2" containerID="afbf5236be6c8a458dbdcd348a4fc7f9ab162ed5d23af38b5f71ac6d77bf4a07" exitCode=0 Jan 21 18:40:15 crc kubenswrapper[4799]: I0121 18:40:15.922837 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d8hm2" event={"ID":"725867c6-9e96-4acd-87d3-9a82725c6fb2","Type":"ContainerDied","Data":"afbf5236be6c8a458dbdcd348a4fc7f9ab162ed5d23af38b5f71ac6d77bf4a07"} Jan 21 18:40:15 crc kubenswrapper[4799]: I0121 18:40:15.923336 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d8hm2" event={"ID":"725867c6-9e96-4acd-87d3-9a82725c6fb2","Type":"ContainerDied","Data":"d44214af2c4671da8b30beabbb6042973532ee71ad0bbed5cf617b316302f324"} Jan 21 18:40:15 crc kubenswrapper[4799]: I0121 18:40:15.923355 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d44214af2c4671da8b30beabbb6042973532ee71ad0bbed5cf617b316302f324" Jan 21 18:40:16 crc kubenswrapper[4799]: I0121 18:40:16.024113 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:16 crc kubenswrapper[4799]: I0121 18:40:16.173602 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/725867c6-9e96-4acd-87d3-9a82725c6fb2-catalog-content\") pod \"725867c6-9e96-4acd-87d3-9a82725c6fb2\" (UID: \"725867c6-9e96-4acd-87d3-9a82725c6fb2\") " Jan 21 18:40:16 crc kubenswrapper[4799]: I0121 18:40:16.173707 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nssfj\" (UniqueName: \"kubernetes.io/projected/725867c6-9e96-4acd-87d3-9a82725c6fb2-kube-api-access-nssfj\") pod \"725867c6-9e96-4acd-87d3-9a82725c6fb2\" (UID: \"725867c6-9e96-4acd-87d3-9a82725c6fb2\") " Jan 21 18:40:16 crc kubenswrapper[4799]: I0121 18:40:16.173773 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/725867c6-9e96-4acd-87d3-9a82725c6fb2-utilities\") pod \"725867c6-9e96-4acd-87d3-9a82725c6fb2\" (UID: \"725867c6-9e96-4acd-87d3-9a82725c6fb2\") " Jan 21 18:40:16 crc kubenswrapper[4799]: I0121 18:40:16.175007 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/725867c6-9e96-4acd-87d3-9a82725c6fb2-utilities" (OuterVolumeSpecName: "utilities") pod "725867c6-9e96-4acd-87d3-9a82725c6fb2" (UID: "725867c6-9e96-4acd-87d3-9a82725c6fb2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:40:16 crc kubenswrapper[4799]: I0121 18:40:16.179786 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/725867c6-9e96-4acd-87d3-9a82725c6fb2-kube-api-access-nssfj" (OuterVolumeSpecName: "kube-api-access-nssfj") pod "725867c6-9e96-4acd-87d3-9a82725c6fb2" (UID: "725867c6-9e96-4acd-87d3-9a82725c6fb2"). InnerVolumeSpecName "kube-api-access-nssfj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:40:16 crc kubenswrapper[4799]: I0121 18:40:16.202351 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/725867c6-9e96-4acd-87d3-9a82725c6fb2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "725867c6-9e96-4acd-87d3-9a82725c6fb2" (UID: "725867c6-9e96-4acd-87d3-9a82725c6fb2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:40:16 crc kubenswrapper[4799]: I0121 18:40:16.205679 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:40:16 crc kubenswrapper[4799]: E0121 18:40:16.206370 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:40:16 crc kubenswrapper[4799]: I0121 18:40:16.276823 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/725867c6-9e96-4acd-87d3-9a82725c6fb2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:40:16 crc kubenswrapper[4799]: I0121 18:40:16.276866 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nssfj\" (UniqueName: \"kubernetes.io/projected/725867c6-9e96-4acd-87d3-9a82725c6fb2-kube-api-access-nssfj\") on node \"crc\" DevicePath \"\"" Jan 21 18:40:16 crc kubenswrapper[4799]: I0121 18:40:16.276879 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/725867c6-9e96-4acd-87d3-9a82725c6fb2-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:40:16 crc kubenswrapper[4799]: I0121 18:40:16.932246 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d8hm2" Jan 21 18:40:16 crc kubenswrapper[4799]: I0121 18:40:16.961407 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d8hm2"] Jan 21 18:40:16 crc kubenswrapper[4799]: I0121 18:40:16.970253 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-d8hm2"] Jan 21 18:40:18 crc kubenswrapper[4799]: I0121 18:40:18.223810 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="725867c6-9e96-4acd-87d3-9a82725c6fb2" path="/var/lib/kubelet/pods/725867c6-9e96-4acd-87d3-9a82725c6fb2/volumes" Jan 21 18:40:28 crc kubenswrapper[4799]: I0121 18:40:28.205616 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:40:28 crc kubenswrapper[4799]: E0121 18:40:28.206425 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:40:42 crc kubenswrapper[4799]: I0121 18:40:42.205107 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:40:42 crc kubenswrapper[4799]: E0121 18:40:42.206110 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.043419 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-w5m6x"] Jan 21 18:40:51 crc kubenswrapper[4799]: E0121 18:40:51.044279 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="725867c6-9e96-4acd-87d3-9a82725c6fb2" containerName="extract-utilities" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.044293 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="725867c6-9e96-4acd-87d3-9a82725c6fb2" containerName="extract-utilities" Jan 21 18:40:51 crc kubenswrapper[4799]: E0121 18:40:51.044327 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="725867c6-9e96-4acd-87d3-9a82725c6fb2" containerName="extract-content" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.044333 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="725867c6-9e96-4acd-87d3-9a82725c6fb2" containerName="extract-content" Jan 21 18:40:51 crc kubenswrapper[4799]: E0121 18:40:51.044344 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="725867c6-9e96-4acd-87d3-9a82725c6fb2" containerName="registry-server" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.044351 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="725867c6-9e96-4acd-87d3-9a82725c6fb2" containerName="registry-server" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.044553 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="725867c6-9e96-4acd-87d3-9a82725c6fb2" containerName="registry-server" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.046002 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.056472 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w5m6x"] Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.201623 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62hrw\" (UniqueName: \"kubernetes.io/projected/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-kube-api-access-62hrw\") pod \"redhat-operators-w5m6x\" (UID: \"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0\") " pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.201687 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-utilities\") pod \"redhat-operators-w5m6x\" (UID: \"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0\") " pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.201791 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-catalog-content\") pod \"redhat-operators-w5m6x\" (UID: \"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0\") " pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.303612 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-catalog-content\") pod \"redhat-operators-w5m6x\" (UID: \"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0\") " pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.303803 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62hrw\" (UniqueName: \"kubernetes.io/projected/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-kube-api-access-62hrw\") pod \"redhat-operators-w5m6x\" (UID: \"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0\") " pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.303860 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-utilities\") pod \"redhat-operators-w5m6x\" (UID: \"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0\") " pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.304274 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-catalog-content\") pod \"redhat-operators-w5m6x\" (UID: \"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0\") " pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.304422 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-utilities\") pod \"redhat-operators-w5m6x\" (UID: \"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0\") " pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.329285 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-62hrw\" (UniqueName: \"kubernetes.io/projected/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-kube-api-access-62hrw\") pod \"redhat-operators-w5m6x\" (UID: \"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0\") " pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.378745 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:40:51 crc kubenswrapper[4799]: I0121 18:40:51.909493 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w5m6x"] Jan 21 18:40:52 crc kubenswrapper[4799]: I0121 18:40:52.318682 4799 generic.go:334] "Generic (PLEG): container finished" podID="e24bd7da-8ec6-4b4a-90b5-69b060dc27c0" containerID="961d29739b1e1ff4190fc82831bcd47d01afff1e40174109c7f278b561b94e52" exitCode=0 Jan 21 18:40:52 crc kubenswrapper[4799]: I0121 18:40:52.318805 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w5m6x" event={"ID":"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0","Type":"ContainerDied","Data":"961d29739b1e1ff4190fc82831bcd47d01afff1e40174109c7f278b561b94e52"} Jan 21 18:40:52 crc kubenswrapper[4799]: I0121 18:40:52.319512 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w5m6x" event={"ID":"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0","Type":"ContainerStarted","Data":"01ed0ef943590b4b01df3b0cee140b5995a4e693750d13afa6abd7bfc4132edf"} Jan 21 18:40:52 crc kubenswrapper[4799]: I0121 18:40:52.321151 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:40:53 crc kubenswrapper[4799]: I0121 18:40:53.206501 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:40:53 crc kubenswrapper[4799]: E0121 18:40:53.207220 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:40:54 crc kubenswrapper[4799]: I0121 18:40:54.360829 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w5m6x" event={"ID":"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0","Type":"ContainerStarted","Data":"846c839e597f2f42f95982cf1288a29d3fe6e43e33dcafaca5695a18b44ded34"} Jan 21 18:41:02 crc kubenswrapper[4799]: I0121 18:41:02.468417 4799 generic.go:334] "Generic (PLEG): container finished" podID="e24bd7da-8ec6-4b4a-90b5-69b060dc27c0" containerID="846c839e597f2f42f95982cf1288a29d3fe6e43e33dcafaca5695a18b44ded34" exitCode=0 Jan 21 18:41:02 crc kubenswrapper[4799]: I0121 18:41:02.468500 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w5m6x" event={"ID":"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0","Type":"ContainerDied","Data":"846c839e597f2f42f95982cf1288a29d3fe6e43e33dcafaca5695a18b44ded34"} Jan 21 18:41:04 crc kubenswrapper[4799]: I0121 18:41:04.213546 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:41:04 crc kubenswrapper[4799]: E0121 18:41:04.214168 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:41:04 crc kubenswrapper[4799]: I0121 18:41:04.496165 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w5m6x" event={"ID":"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0","Type":"ContainerStarted","Data":"1d756372d32a8638136f2a0115947796067f29845fef46f9a2c339663c65652a"} Jan 21 18:41:04 crc kubenswrapper[4799]: I0121 18:41:04.524147 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-w5m6x" podStartSLOduration=1.8988057600000001 podStartE2EDuration="13.524105794s" podCreationTimestamp="2026-01-21 18:40:51 +0000 UTC" firstStartedPulling="2026-01-21 18:40:52.320844831 +0000 UTC m=+4078.947134854" lastFinishedPulling="2026-01-21 18:41:03.946144865 +0000 UTC m=+4090.572434888" observedRunningTime="2026-01-21 18:41:04.515244093 +0000 UTC m=+4091.141534126" watchObservedRunningTime="2026-01-21 18:41:04.524105794 +0000 UTC m=+4091.150395817" Jan 21 18:41:11 crc kubenswrapper[4799]: I0121 18:41:11.378897 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:41:11 crc kubenswrapper[4799]: I0121 18:41:11.379525 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:41:11 crc kubenswrapper[4799]: I0121 18:41:11.438429 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:41:11 crc kubenswrapper[4799]: I0121 18:41:11.640756 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:41:11 crc kubenswrapper[4799]: I0121 18:41:11.691113 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w5m6x"] Jan 21 18:41:13 crc kubenswrapper[4799]: I0121 18:41:13.602390 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-w5m6x" podUID="e24bd7da-8ec6-4b4a-90b5-69b060dc27c0" containerName="registry-server" containerID="cri-o://1d756372d32a8638136f2a0115947796067f29845fef46f9a2c339663c65652a" gracePeriod=2 Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.100103 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.191955 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-utilities\") pod \"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0\" (UID: \"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0\") " Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.192107 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62hrw\" (UniqueName: \"kubernetes.io/projected/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-kube-api-access-62hrw\") pod \"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0\" (UID: \"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0\") " Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.192279 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-catalog-content\") pod \"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0\" (UID: \"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0\") " Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.193098 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-utilities" (OuterVolumeSpecName: "utilities") pod "e24bd7da-8ec6-4b4a-90b5-69b060dc27c0" (UID: "e24bd7da-8ec6-4b4a-90b5-69b060dc27c0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.199466 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-kube-api-access-62hrw" (OuterVolumeSpecName: "kube-api-access-62hrw") pod "e24bd7da-8ec6-4b4a-90b5-69b060dc27c0" (UID: "e24bd7da-8ec6-4b4a-90b5-69b060dc27c0"). InnerVolumeSpecName "kube-api-access-62hrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.295978 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.296010 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62hrw\" (UniqueName: \"kubernetes.io/projected/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-kube-api-access-62hrw\") on node \"crc\" DevicePath \"\"" Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.317562 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e24bd7da-8ec6-4b4a-90b5-69b060dc27c0" (UID: "e24bd7da-8ec6-4b4a-90b5-69b060dc27c0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.398544 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.612987 4799 generic.go:334] "Generic (PLEG): container finished" podID="e24bd7da-8ec6-4b4a-90b5-69b060dc27c0" containerID="1d756372d32a8638136f2a0115947796067f29845fef46f9a2c339663c65652a" exitCode=0 Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.613048 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w5m6x" event={"ID":"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0","Type":"ContainerDied","Data":"1d756372d32a8638136f2a0115947796067f29845fef46f9a2c339663c65652a"} Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.613100 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w5m6x" event={"ID":"e24bd7da-8ec6-4b4a-90b5-69b060dc27c0","Type":"ContainerDied","Data":"01ed0ef943590b4b01df3b0cee140b5995a4e693750d13afa6abd7bfc4132edf"} Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.613105 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w5m6x" Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.613142 4799 scope.go:117] "RemoveContainer" containerID="1d756372d32a8638136f2a0115947796067f29845fef46f9a2c339663c65652a" Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.645165 4799 scope.go:117] "RemoveContainer" containerID="846c839e597f2f42f95982cf1288a29d3fe6e43e33dcafaca5695a18b44ded34" Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.659915 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w5m6x"] Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.673952 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-w5m6x"] Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.684537 4799 scope.go:117] "RemoveContainer" containerID="961d29739b1e1ff4190fc82831bcd47d01afff1e40174109c7f278b561b94e52" Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.727035 4799 scope.go:117] "RemoveContainer" containerID="1d756372d32a8638136f2a0115947796067f29845fef46f9a2c339663c65652a" Jan 21 18:41:14 crc kubenswrapper[4799]: E0121 18:41:14.727653 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d756372d32a8638136f2a0115947796067f29845fef46f9a2c339663c65652a\": container with ID starting with 1d756372d32a8638136f2a0115947796067f29845fef46f9a2c339663c65652a not found: ID does not exist" containerID="1d756372d32a8638136f2a0115947796067f29845fef46f9a2c339663c65652a" Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.727713 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d756372d32a8638136f2a0115947796067f29845fef46f9a2c339663c65652a"} err="failed to get container status \"1d756372d32a8638136f2a0115947796067f29845fef46f9a2c339663c65652a\": rpc error: code = NotFound desc = could not find container \"1d756372d32a8638136f2a0115947796067f29845fef46f9a2c339663c65652a\": container with ID starting with 1d756372d32a8638136f2a0115947796067f29845fef46f9a2c339663c65652a not found: ID does not exist" Jan 21 18:41:14 crc 
kubenswrapper[4799]: I0121 18:41:14.727754 4799 scope.go:117] "RemoveContainer" containerID="846c839e597f2f42f95982cf1288a29d3fe6e43e33dcafaca5695a18b44ded34" Jan 21 18:41:14 crc kubenswrapper[4799]: E0121 18:41:14.731540 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"846c839e597f2f42f95982cf1288a29d3fe6e43e33dcafaca5695a18b44ded34\": container with ID starting with 846c839e597f2f42f95982cf1288a29d3fe6e43e33dcafaca5695a18b44ded34 not found: ID does not exist" containerID="846c839e597f2f42f95982cf1288a29d3fe6e43e33dcafaca5695a18b44ded34" Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.731576 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"846c839e597f2f42f95982cf1288a29d3fe6e43e33dcafaca5695a18b44ded34"} err="failed to get container status \"846c839e597f2f42f95982cf1288a29d3fe6e43e33dcafaca5695a18b44ded34\": rpc error: code = NotFound desc = could not find container \"846c839e597f2f42f95982cf1288a29d3fe6e43e33dcafaca5695a18b44ded34\": container with ID starting with 846c839e597f2f42f95982cf1288a29d3fe6e43e33dcafaca5695a18b44ded34 not found: ID does not exist" Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.731622 4799 scope.go:117] "RemoveContainer" containerID="961d29739b1e1ff4190fc82831bcd47d01afff1e40174109c7f278b561b94e52" Jan 21 18:41:14 crc kubenswrapper[4799]: E0121 18:41:14.732011 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"961d29739b1e1ff4190fc82831bcd47d01afff1e40174109c7f278b561b94e52\": container with ID starting with 961d29739b1e1ff4190fc82831bcd47d01afff1e40174109c7f278b561b94e52 not found: ID does not exist" containerID="961d29739b1e1ff4190fc82831bcd47d01afff1e40174109c7f278b561b94e52" Jan 21 18:41:14 crc kubenswrapper[4799]: I0121 18:41:14.732039 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"961d29739b1e1ff4190fc82831bcd47d01afff1e40174109c7f278b561b94e52"} err="failed to get container status \"961d29739b1e1ff4190fc82831bcd47d01afff1e40174109c7f278b561b94e52\": rpc error: code = NotFound desc = could not find container \"961d29739b1e1ff4190fc82831bcd47d01afff1e40174109c7f278b561b94e52\": container with ID starting with 961d29739b1e1ff4190fc82831bcd47d01afff1e40174109c7f278b561b94e52 not found: ID does not exist" Jan 21 18:41:16 crc kubenswrapper[4799]: I0121 18:41:16.220750 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e24bd7da-8ec6-4b4a-90b5-69b060dc27c0" path="/var/lib/kubelet/pods/e24bd7da-8ec6-4b4a-90b5-69b060dc27c0/volumes" Jan 21 18:41:19 crc kubenswrapper[4799]: I0121 18:41:19.205150 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:41:19 crc kubenswrapper[4799]: E0121 18:41:19.205522 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:41:31 crc kubenswrapper[4799]: I0121 18:41:31.205482 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" 
Jan 21 18:41:31 crc kubenswrapper[4799]: E0121 18:41:31.206486 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:41:40 crc kubenswrapper[4799]: I0121 18:41:40.729883 4799 scope.go:117] "RemoveContainer" containerID="6270fc67853b15c3ff3b20f24c8fce453575409dbe83f5e573a7843fe1553355" Jan 21 18:41:40 crc kubenswrapper[4799]: I0121 18:41:40.760680 4799 scope.go:117] "RemoveContainer" containerID="a802be8ced9ef403efc7131d87262a34bc6a5b40f374ecf9bf7641c553d7b6de" Jan 21 18:41:40 crc kubenswrapper[4799]: I0121 18:41:40.817838 4799 scope.go:117] "RemoveContainer" containerID="7c3fb1c8c2b148c1b2bcb489aa0a7d40db071b5d34c1cf4935ea6b59d34fed2f" Jan 21 18:41:42 crc kubenswrapper[4799]: I0121 18:41:42.205580 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:41:42 crc kubenswrapper[4799]: E0121 18:41:42.206260 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:41:57 crc kubenswrapper[4799]: I0121 18:41:57.206186 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:41:57 crc kubenswrapper[4799]: E0121 18:41:57.206920 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:42:12 crc kubenswrapper[4799]: I0121 18:42:12.205602 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:42:12 crc kubenswrapper[4799]: E0121 18:42:12.206407 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:42:23 crc kubenswrapper[4799]: I0121 18:42:23.204836 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:42:23 crc kubenswrapper[4799]: E0121 18:42:23.205625 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:42:37 crc kubenswrapper[4799]: I0121 18:42:37.206321 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:42:38 crc kubenswrapper[4799]: I0121 18:42:38.481763 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"39d95bbe738803d01806d4afe6a550161dbadf488cd50cb769dab361a07d378f"} Jan 21 18:44:55 crc kubenswrapper[4799]: I0121 18:44:55.975730 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:44:55 crc kubenswrapper[4799]: I0121 18:44:55.976366 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:45:00 crc kubenswrapper[4799]: I0121 18:45:00.196390 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t"] Jan 21 18:45:00 crc kubenswrapper[4799]: E0121 18:45:00.197620 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e24bd7da-8ec6-4b4a-90b5-69b060dc27c0" containerName="extract-utilities" Jan 21 18:45:00 crc kubenswrapper[4799]: I0121 18:45:00.197636 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e24bd7da-8ec6-4b4a-90b5-69b060dc27c0" containerName="extract-utilities" Jan 21 18:45:00 crc kubenswrapper[4799]: E0121 18:45:00.197687 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e24bd7da-8ec6-4b4a-90b5-69b060dc27c0" containerName="registry-server" Jan 21 18:45:00 crc kubenswrapper[4799]: I0121 18:45:00.197694 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e24bd7da-8ec6-4b4a-90b5-69b060dc27c0" containerName="registry-server" Jan 21 18:45:00 crc kubenswrapper[4799]: E0121 18:45:00.197723 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e24bd7da-8ec6-4b4a-90b5-69b060dc27c0" containerName="extract-content" Jan 21 18:45:00 crc kubenswrapper[4799]: I0121 18:45:00.197731 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e24bd7da-8ec6-4b4a-90b5-69b060dc27c0" containerName="extract-content" Jan 21 18:45:00 crc kubenswrapper[4799]: I0121 18:45:00.197942 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e24bd7da-8ec6-4b4a-90b5-69b060dc27c0" containerName="registry-server" Jan 21 18:45:00 crc kubenswrapper[4799]: I0121 18:45:00.198921 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" Jan 21 18:45:00 crc kubenswrapper[4799]: I0121 18:45:00.201027 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 21 18:45:00 crc kubenswrapper[4799]: I0121 18:45:00.201555 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 21 18:45:00 crc kubenswrapper[4799]: I0121 18:45:00.222497 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t"] Jan 21 18:45:00 crc kubenswrapper[4799]: I0121 18:45:00.353801 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-secret-volume\") pod \"collect-profiles-29483685-qmv4t\" (UID: \"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" Jan 21 18:45:00 crc kubenswrapper[4799]: I0121 18:45:00.353985 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4jbb\" (UniqueName: \"kubernetes.io/projected/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-kube-api-access-w4jbb\") pod \"collect-profiles-29483685-qmv4t\" (UID: \"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" Jan 21 18:45:00 crc kubenswrapper[4799]: I0121 18:45:00.354316 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-config-volume\") pod \"collect-profiles-29483685-qmv4t\" (UID: \"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" Jan 21 18:45:00 crc kubenswrapper[4799]: I0121 18:45:00.457227 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-config-volume\") pod \"collect-profiles-29483685-qmv4t\" (UID: \"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" Jan 21 18:45:00 crc kubenswrapper[4799]: I0121 18:45:00.457452 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-secret-volume\") pod \"collect-profiles-29483685-qmv4t\" (UID: \"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" Jan 21 18:45:00 crc kubenswrapper[4799]: I0121 18:45:00.457519 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4jbb\" (UniqueName: \"kubernetes.io/projected/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-kube-api-access-w4jbb\") pod \"collect-profiles-29483685-qmv4t\" (UID: \"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" Jan 21 18:45:00 crc kubenswrapper[4799]: I0121 18:45:00.458626 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-config-volume\") pod 
\"collect-profiles-29483685-qmv4t\" (UID: \"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" Jan 21 18:45:08 crc kubenswrapper[4799]: I0121 18:45:07.994592 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4jbb\" (UniqueName: \"kubernetes.io/projected/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-kube-api-access-w4jbb\") pod \"collect-profiles-29483685-qmv4t\" (UID: \"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" Jan 21 18:45:08 crc kubenswrapper[4799]: I0121 18:45:08.021496 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-secret-volume\") pod \"collect-profiles-29483685-qmv4t\" (UID: \"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" Jan 21 18:45:08 crc kubenswrapper[4799]: I0121 18:45:08.027182 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" Jan 21 18:45:08 crc kubenswrapper[4799]: I0121 18:45:08.760693 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t"] Jan 21 18:45:09 crc kubenswrapper[4799]: I0121 18:45:09.152593 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" event={"ID":"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5","Type":"ContainerStarted","Data":"8f02547d9b7616143b3b39238148ca214ccf4bb2d5b8a3a94af395e8691eec82"} Jan 21 18:45:09 crc kubenswrapper[4799]: I0121 18:45:09.152656 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" event={"ID":"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5","Type":"ContainerStarted","Data":"96a199baaef9538db625e546f66cf7f5dd05cbd5448f15d891774e484c3e7d17"} Jan 21 18:45:10 crc kubenswrapper[4799]: I0121 18:45:10.165329 4799 generic.go:334] "Generic (PLEG): container finished" podID="1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5" containerID="8f02547d9b7616143b3b39238148ca214ccf4bb2d5b8a3a94af395e8691eec82" exitCode=0 Jan 21 18:45:10 crc kubenswrapper[4799]: I0121 18:45:10.165406 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" event={"ID":"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5","Type":"ContainerDied","Data":"8f02547d9b7616143b3b39238148ca214ccf4bb2d5b8a3a94af395e8691eec82"} Jan 21 18:45:11 crc kubenswrapper[4799]: I0121 18:45:11.626811 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" Jan 21 18:45:11 crc kubenswrapper[4799]: I0121 18:45:11.724374 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4jbb\" (UniqueName: \"kubernetes.io/projected/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-kube-api-access-w4jbb\") pod \"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5\" (UID: \"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5\") " Jan 21 18:45:11 crc kubenswrapper[4799]: I0121 18:45:11.724495 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-config-volume\") pod \"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5\" (UID: \"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5\") " Jan 21 18:45:11 crc kubenswrapper[4799]: I0121 18:45:11.724762 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-secret-volume\") pod \"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5\" (UID: \"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5\") " Jan 21 18:45:11 crc kubenswrapper[4799]: I0121 18:45:11.725413 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-config-volume" (OuterVolumeSpecName: "config-volume") pod "1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5" (UID: "1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 18:45:11 crc kubenswrapper[4799]: I0121 18:45:11.731098 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5" (UID: "1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 18:45:11 crc kubenswrapper[4799]: I0121 18:45:11.731695 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-kube-api-access-w4jbb" (OuterVolumeSpecName: "kube-api-access-w4jbb") pod "1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5" (UID: "1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5"). InnerVolumeSpecName "kube-api-access-w4jbb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:45:11 crc kubenswrapper[4799]: I0121 18:45:11.836081 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4jbb\" (UniqueName: \"kubernetes.io/projected/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-kube-api-access-w4jbb\") on node \"crc\" DevicePath \"\"" Jan 21 18:45:11 crc kubenswrapper[4799]: I0121 18:45:11.836178 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-config-volume\") on node \"crc\" DevicePath \"\"" Jan 21 18:45:11 crc kubenswrapper[4799]: I0121 18:45:11.836191 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 21 18:45:11 crc kubenswrapper[4799]: I0121 18:45:11.838366 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq"] Jan 21 18:45:11 crc kubenswrapper[4799]: I0121 18:45:11.881299 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483640-l9qbq"] Jan 21 18:45:12 crc kubenswrapper[4799]: I0121 18:45:12.187954 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" event={"ID":"1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5","Type":"ContainerDied","Data":"96a199baaef9538db625e546f66cf7f5dd05cbd5448f15d891774e484c3e7d17"} Jan 21 18:45:12 crc kubenswrapper[4799]: I0121 18:45:12.188008 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="96a199baaef9538db625e546f66cf7f5dd05cbd5448f15d891774e484c3e7d17" Jan 21 18:45:12 crc kubenswrapper[4799]: I0121 18:45:12.188019 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483685-qmv4t" Jan 21 18:45:12 crc kubenswrapper[4799]: I0121 18:45:12.218508 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c82d9473-3cf4-4119-8ffe-9a2ab7bafd81" path="/var/lib/kubelet/pods/c82d9473-3cf4-4119-8ffe-9a2ab7bafd81/volumes" Jan 21 18:45:18 crc kubenswrapper[4799]: I0121 18:45:18.507428 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tsn5w"] Jan 21 18:45:18 crc kubenswrapper[4799]: E0121 18:45:18.508609 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5" containerName="collect-profiles" Jan 21 18:45:18 crc kubenswrapper[4799]: I0121 18:45:18.508624 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5" containerName="collect-profiles" Jan 21 18:45:18 crc kubenswrapper[4799]: I0121 18:45:18.508854 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d4efbc9-496e-4a3a-9da8-6f6f41e80ac5" containerName="collect-profiles" Jan 21 18:45:18 crc kubenswrapper[4799]: I0121 18:45:18.510554 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:18 crc kubenswrapper[4799]: I0121 18:45:18.516874 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tsn5w"] Jan 21 18:45:18 crc kubenswrapper[4799]: I0121 18:45:18.595076 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbjwx\" (UniqueName: \"kubernetes.io/projected/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-kube-api-access-bbjwx\") pod \"community-operators-tsn5w\" (UID: \"17d59e2d-663a-4acb-a1df-e5edd0e61dcb\") " pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:18 crc kubenswrapper[4799]: I0121 18:45:18.595427 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-utilities\") pod \"community-operators-tsn5w\" (UID: \"17d59e2d-663a-4acb-a1df-e5edd0e61dcb\") " pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:18 crc kubenswrapper[4799]: I0121 18:45:18.595494 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-catalog-content\") pod \"community-operators-tsn5w\" (UID: \"17d59e2d-663a-4acb-a1df-e5edd0e61dcb\") " pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:18 crc kubenswrapper[4799]: I0121 18:45:18.698486 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbjwx\" (UniqueName: \"kubernetes.io/projected/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-kube-api-access-bbjwx\") pod \"community-operators-tsn5w\" (UID: \"17d59e2d-663a-4acb-a1df-e5edd0e61dcb\") " pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:18 crc kubenswrapper[4799]: I0121 18:45:18.698590 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-utilities\") pod \"community-operators-tsn5w\" (UID: \"17d59e2d-663a-4acb-a1df-e5edd0e61dcb\") " pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:18 crc kubenswrapper[4799]: I0121 18:45:18.698616 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-catalog-content\") pod \"community-operators-tsn5w\" (UID: \"17d59e2d-663a-4acb-a1df-e5edd0e61dcb\") " pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:18 crc kubenswrapper[4799]: I0121 18:45:18.699258 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-utilities\") pod \"community-operators-tsn5w\" (UID: \"17d59e2d-663a-4acb-a1df-e5edd0e61dcb\") " pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:18 crc kubenswrapper[4799]: I0121 18:45:18.699359 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-catalog-content\") pod \"community-operators-tsn5w\" (UID: \"17d59e2d-663a-4acb-a1df-e5edd0e61dcb\") " pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:18 crc kubenswrapper[4799]: I0121 18:45:18.730479 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bbjwx\" (UniqueName: \"kubernetes.io/projected/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-kube-api-access-bbjwx\") pod \"community-operators-tsn5w\" (UID: \"17d59e2d-663a-4acb-a1df-e5edd0e61dcb\") " pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:18 crc kubenswrapper[4799]: I0121 18:45:18.852381 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:19 crc kubenswrapper[4799]: I0121 18:45:19.397906 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tsn5w"] Jan 21 18:45:20 crc kubenswrapper[4799]: I0121 18:45:20.268638 4799 generic.go:334] "Generic (PLEG): container finished" podID="17d59e2d-663a-4acb-a1df-e5edd0e61dcb" containerID="e40c3265ebb455587193dd17fbe48c4a5adbdd1fc73c7845da981127bb3d8846" exitCode=0 Jan 21 18:45:20 crc kubenswrapper[4799]: I0121 18:45:20.268710 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tsn5w" event={"ID":"17d59e2d-663a-4acb-a1df-e5edd0e61dcb","Type":"ContainerDied","Data":"e40c3265ebb455587193dd17fbe48c4a5adbdd1fc73c7845da981127bb3d8846"} Jan 21 18:45:20 crc kubenswrapper[4799]: I0121 18:45:20.268951 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tsn5w" event={"ID":"17d59e2d-663a-4acb-a1df-e5edd0e61dcb","Type":"ContainerStarted","Data":"003113f83f2dcaa16fbfd516a0353b720a83771665d673ae513963d99b77552a"} Jan 21 18:45:24 crc kubenswrapper[4799]: I0121 18:45:24.309682 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tsn5w" event={"ID":"17d59e2d-663a-4acb-a1df-e5edd0e61dcb","Type":"ContainerStarted","Data":"719c708556bf86e1d06d867f863d79f8c184919c3cfa2bdcdc42bd6c7cdafab9"} Jan 21 18:45:25 crc kubenswrapper[4799]: I0121 18:45:25.322481 4799 generic.go:334] "Generic (PLEG): container finished" podID="17d59e2d-663a-4acb-a1df-e5edd0e61dcb" containerID="719c708556bf86e1d06d867f863d79f8c184919c3cfa2bdcdc42bd6c7cdafab9" exitCode=0 Jan 21 18:45:25 crc kubenswrapper[4799]: I0121 18:45:25.322549 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tsn5w" event={"ID":"17d59e2d-663a-4acb-a1df-e5edd0e61dcb","Type":"ContainerDied","Data":"719c708556bf86e1d06d867f863d79f8c184919c3cfa2bdcdc42bd6c7cdafab9"} Jan 21 18:45:25 crc kubenswrapper[4799]: I0121 18:45:25.972225 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:45:25 crc kubenswrapper[4799]: I0121 18:45:25.972641 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:45:26 crc kubenswrapper[4799]: I0121 18:45:26.339672 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tsn5w" event={"ID":"17d59e2d-663a-4acb-a1df-e5edd0e61dcb","Type":"ContainerStarted","Data":"be6b865cd5fb0d6746a5d10fd37041a88ff2517b5c817f9ceddd5d27d08921c1"} Jan 21 
18:45:26 crc kubenswrapper[4799]: I0121 18:45:26.371881 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tsn5w" podStartSLOduration=2.902008075 podStartE2EDuration="8.371822343s" podCreationTimestamp="2026-01-21 18:45:18 +0000 UTC" firstStartedPulling="2026-01-21 18:45:20.271806246 +0000 UTC m=+4346.898096269" lastFinishedPulling="2026-01-21 18:45:25.741620504 +0000 UTC m=+4352.367910537" observedRunningTime="2026-01-21 18:45:26.360473332 +0000 UTC m=+4352.986763365" watchObservedRunningTime="2026-01-21 18:45:26.371822343 +0000 UTC m=+4352.998112366" Jan 21 18:45:28 crc kubenswrapper[4799]: I0121 18:45:28.852969 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:28 crc kubenswrapper[4799]: I0121 18:45:28.853270 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:28 crc kubenswrapper[4799]: I0121 18:45:28.904353 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:38 crc kubenswrapper[4799]: I0121 18:45:38.911451 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:38 crc kubenswrapper[4799]: I0121 18:45:38.964457 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tsn5w"] Jan 21 18:45:39 crc kubenswrapper[4799]: I0121 18:45:39.463042 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tsn5w" podUID="17d59e2d-663a-4acb-a1df-e5edd0e61dcb" containerName="registry-server" containerID="cri-o://be6b865cd5fb0d6746a5d10fd37041a88ff2517b5c817f9ceddd5d27d08921c1" gracePeriod=2 Jan 21 18:45:39 crc kubenswrapper[4799]: I0121 18:45:39.978614 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.142316 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-utilities\") pod \"17d59e2d-663a-4acb-a1df-e5edd0e61dcb\" (UID: \"17d59e2d-663a-4acb-a1df-e5edd0e61dcb\") " Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.142682 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-catalog-content\") pod \"17d59e2d-663a-4acb-a1df-e5edd0e61dcb\" (UID: \"17d59e2d-663a-4acb-a1df-e5edd0e61dcb\") " Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.142745 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbjwx\" (UniqueName: \"kubernetes.io/projected/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-kube-api-access-bbjwx\") pod \"17d59e2d-663a-4acb-a1df-e5edd0e61dcb\" (UID: \"17d59e2d-663a-4acb-a1df-e5edd0e61dcb\") " Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.143165 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-utilities" (OuterVolumeSpecName: "utilities") pod "17d59e2d-663a-4acb-a1df-e5edd0e61dcb" (UID: "17d59e2d-663a-4acb-a1df-e5edd0e61dcb"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.143351 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.149214 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-kube-api-access-bbjwx" (OuterVolumeSpecName: "kube-api-access-bbjwx") pod "17d59e2d-663a-4acb-a1df-e5edd0e61dcb" (UID: "17d59e2d-663a-4acb-a1df-e5edd0e61dcb"). InnerVolumeSpecName "kube-api-access-bbjwx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.197603 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "17d59e2d-663a-4acb-a1df-e5edd0e61dcb" (UID: "17d59e2d-663a-4acb-a1df-e5edd0e61dcb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.245605 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.245641 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbjwx\" (UniqueName: \"kubernetes.io/projected/17d59e2d-663a-4acb-a1df-e5edd0e61dcb-kube-api-access-bbjwx\") on node \"crc\" DevicePath \"\"" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.476477 4799 generic.go:334] "Generic (PLEG): container finished" podID="17d59e2d-663a-4acb-a1df-e5edd0e61dcb" containerID="be6b865cd5fb0d6746a5d10fd37041a88ff2517b5c817f9ceddd5d27d08921c1" exitCode=0 Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.476525 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tsn5w" event={"ID":"17d59e2d-663a-4acb-a1df-e5edd0e61dcb","Type":"ContainerDied","Data":"be6b865cd5fb0d6746a5d10fd37041a88ff2517b5c817f9ceddd5d27d08921c1"} Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.476560 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tsn5w" event={"ID":"17d59e2d-663a-4acb-a1df-e5edd0e61dcb","Type":"ContainerDied","Data":"003113f83f2dcaa16fbfd516a0353b720a83771665d673ae513963d99b77552a"} Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.476577 4799 scope.go:117] "RemoveContainer" containerID="be6b865cd5fb0d6746a5d10fd37041a88ff2517b5c817f9ceddd5d27d08921c1" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.476593 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tsn5w" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.504843 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tsn5w"] Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.517304 4799 scope.go:117] "RemoveContainer" containerID="719c708556bf86e1d06d867f863d79f8c184919c3cfa2bdcdc42bd6c7cdafab9" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.525570 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tsn5w"] Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.542658 4799 scope.go:117] "RemoveContainer" containerID="e40c3265ebb455587193dd17fbe48c4a5adbdd1fc73c7845da981127bb3d8846" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.590357 4799 scope.go:117] "RemoveContainer" containerID="be6b865cd5fb0d6746a5d10fd37041a88ff2517b5c817f9ceddd5d27d08921c1" Jan 21 18:45:40 crc kubenswrapper[4799]: E0121 18:45:40.590786 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be6b865cd5fb0d6746a5d10fd37041a88ff2517b5c817f9ceddd5d27d08921c1\": container with ID starting with be6b865cd5fb0d6746a5d10fd37041a88ff2517b5c817f9ceddd5d27d08921c1 not found: ID does not exist" containerID="be6b865cd5fb0d6746a5d10fd37041a88ff2517b5c817f9ceddd5d27d08921c1" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.590837 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be6b865cd5fb0d6746a5d10fd37041a88ff2517b5c817f9ceddd5d27d08921c1"} err="failed to get container status \"be6b865cd5fb0d6746a5d10fd37041a88ff2517b5c817f9ceddd5d27d08921c1\": rpc error: code = NotFound desc = could not find container \"be6b865cd5fb0d6746a5d10fd37041a88ff2517b5c817f9ceddd5d27d08921c1\": container with ID starting with be6b865cd5fb0d6746a5d10fd37041a88ff2517b5c817f9ceddd5d27d08921c1 not found: ID does not exist" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.590868 4799 scope.go:117] "RemoveContainer" containerID="719c708556bf86e1d06d867f863d79f8c184919c3cfa2bdcdc42bd6c7cdafab9" Jan 21 18:45:40 crc kubenswrapper[4799]: E0121 18:45:40.591122 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"719c708556bf86e1d06d867f863d79f8c184919c3cfa2bdcdc42bd6c7cdafab9\": container with ID starting with 719c708556bf86e1d06d867f863d79f8c184919c3cfa2bdcdc42bd6c7cdafab9 not found: ID does not exist" containerID="719c708556bf86e1d06d867f863d79f8c184919c3cfa2bdcdc42bd6c7cdafab9" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.591177 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"719c708556bf86e1d06d867f863d79f8c184919c3cfa2bdcdc42bd6c7cdafab9"} err="failed to get container status \"719c708556bf86e1d06d867f863d79f8c184919c3cfa2bdcdc42bd6c7cdafab9\": rpc error: code = NotFound desc = could not find container \"719c708556bf86e1d06d867f863d79f8c184919c3cfa2bdcdc42bd6c7cdafab9\": container with ID starting with 719c708556bf86e1d06d867f863d79f8c184919c3cfa2bdcdc42bd6c7cdafab9 not found: ID does not exist" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.591199 4799 scope.go:117] "RemoveContainer" containerID="e40c3265ebb455587193dd17fbe48c4a5adbdd1fc73c7845da981127bb3d8846" Jan 21 18:45:40 crc kubenswrapper[4799]: E0121 18:45:40.591475 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e40c3265ebb455587193dd17fbe48c4a5adbdd1fc73c7845da981127bb3d8846\": container with ID starting with e40c3265ebb455587193dd17fbe48c4a5adbdd1fc73c7845da981127bb3d8846 not found: ID does not exist" containerID="e40c3265ebb455587193dd17fbe48c4a5adbdd1fc73c7845da981127bb3d8846" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.591506 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e40c3265ebb455587193dd17fbe48c4a5adbdd1fc73c7845da981127bb3d8846"} err="failed to get container status \"e40c3265ebb455587193dd17fbe48c4a5adbdd1fc73c7845da981127bb3d8846\": rpc error: code = NotFound desc = could not find container \"e40c3265ebb455587193dd17fbe48c4a5adbdd1fc73c7845da981127bb3d8846\": container with ID starting with e40c3265ebb455587193dd17fbe48c4a5adbdd1fc73c7845da981127bb3d8846 not found: ID does not exist" Jan 21 18:45:40 crc kubenswrapper[4799]: I0121 18:45:40.953721 4799 scope.go:117] "RemoveContainer" containerID="92697d8c761a2b0f5621f465332d14d8bdaded0b115501864ba638dffc4ffe9c" Jan 21 18:45:42 crc kubenswrapper[4799]: I0121 18:45:42.217360 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17d59e2d-663a-4acb-a1df-e5edd0e61dcb" path="/var/lib/kubelet/pods/17d59e2d-663a-4acb-a1df-e5edd0e61dcb/volumes" Jan 21 18:45:55 crc kubenswrapper[4799]: I0121 18:45:55.971364 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:45:55 crc kubenswrapper[4799]: I0121 18:45:55.972000 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:45:55 crc kubenswrapper[4799]: I0121 18:45:55.972075 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 18:45:55 crc kubenswrapper[4799]: I0121 18:45:55.973093 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"39d95bbe738803d01806d4afe6a550161dbadf488cd50cb769dab361a07d378f"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:45:55 crc kubenswrapper[4799]: I0121 18:45:55.973186 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://39d95bbe738803d01806d4afe6a550161dbadf488cd50cb769dab361a07d378f" gracePeriod=600 Jan 21 18:45:56 crc kubenswrapper[4799]: I0121 18:45:56.657679 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="39d95bbe738803d01806d4afe6a550161dbadf488cd50cb769dab361a07d378f" exitCode=0 Jan 21 18:45:56 crc kubenswrapper[4799]: I0121 18:45:56.657748 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"39d95bbe738803d01806d4afe6a550161dbadf488cd50cb769dab361a07d378f"} Jan 21 18:45:56 crc kubenswrapper[4799]: I0121 18:45:56.658002 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c"} Jan 21 18:45:56 crc kubenswrapper[4799]: I0121 18:45:56.658025 4799 scope.go:117] "RemoveContainer" containerID="25322227c90519b48b5f3c132f60d84b82603c7bbbbf765ff4766999f0c443c4" Jan 21 18:46:41 crc kubenswrapper[4799]: I0121 18:46:41.032723 4799 scope.go:117] "RemoveContainer" containerID="e02972aaf678fabb3589a5091d5938e5b2a20a1779fee8275e42b2e65dac71f9" Jan 21 18:46:41 crc kubenswrapper[4799]: I0121 18:46:41.057882 4799 scope.go:117] "RemoveContainer" containerID="afbf5236be6c8a458dbdcd348a4fc7f9ab162ed5d23af38b5f71ac6d77bf4a07" Jan 21 18:46:41 crc kubenswrapper[4799]: I0121 18:46:41.122416 4799 scope.go:117] "RemoveContainer" containerID="baeffbc1da7210e62e3d4fba64a9a06cda458ba5e971272abaea70c76d1b128a" Jan 21 18:46:44 crc kubenswrapper[4799]: I0121 18:46:44.432578 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-z86g5"] Jan 21 18:46:44 crc kubenswrapper[4799]: E0121 18:46:44.434335 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17d59e2d-663a-4acb-a1df-e5edd0e61dcb" containerName="registry-server" Jan 21 18:46:44 crc kubenswrapper[4799]: I0121 18:46:44.434361 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="17d59e2d-663a-4acb-a1df-e5edd0e61dcb" containerName="registry-server" Jan 21 18:46:44 crc kubenswrapper[4799]: E0121 18:46:44.434382 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17d59e2d-663a-4acb-a1df-e5edd0e61dcb" containerName="extract-content" Jan 21 18:46:44 crc kubenswrapper[4799]: I0121 18:46:44.434391 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="17d59e2d-663a-4acb-a1df-e5edd0e61dcb" containerName="extract-content" Jan 21 18:46:44 crc kubenswrapper[4799]: E0121 18:46:44.434429 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17d59e2d-663a-4acb-a1df-e5edd0e61dcb" containerName="extract-utilities" Jan 21 18:46:44 crc kubenswrapper[4799]: I0121 18:46:44.434439 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="17d59e2d-663a-4acb-a1df-e5edd0e61dcb" containerName="extract-utilities" Jan 21 18:46:44 crc kubenswrapper[4799]: I0121 18:46:44.434814 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="17d59e2d-663a-4acb-a1df-e5edd0e61dcb" containerName="registry-server" Jan 21 18:46:44 crc kubenswrapper[4799]: I0121 18:46:44.437898 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:44 crc kubenswrapper[4799]: I0121 18:46:44.444239 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f24856a3-e206-4a4a-99f1-51e63c93c337-catalog-content\") pod \"certified-operators-z86g5\" (UID: \"f24856a3-e206-4a4a-99f1-51e63c93c337\") " pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:44 crc kubenswrapper[4799]: I0121 18:46:44.444360 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgn2r\" (UniqueName: \"kubernetes.io/projected/f24856a3-e206-4a4a-99f1-51e63c93c337-kube-api-access-rgn2r\") pod \"certified-operators-z86g5\" (UID: \"f24856a3-e206-4a4a-99f1-51e63c93c337\") " pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:44 crc kubenswrapper[4799]: I0121 18:46:44.444597 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f24856a3-e206-4a4a-99f1-51e63c93c337-utilities\") pod \"certified-operators-z86g5\" (UID: \"f24856a3-e206-4a4a-99f1-51e63c93c337\") " pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:44 crc kubenswrapper[4799]: I0121 18:46:44.447712 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z86g5"] Jan 21 18:46:44 crc kubenswrapper[4799]: I0121 18:46:44.547625 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f24856a3-e206-4a4a-99f1-51e63c93c337-utilities\") pod \"certified-operators-z86g5\" (UID: \"f24856a3-e206-4a4a-99f1-51e63c93c337\") " pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:44 crc kubenswrapper[4799]: I0121 18:46:44.547822 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f24856a3-e206-4a4a-99f1-51e63c93c337-catalog-content\") pod \"certified-operators-z86g5\" (UID: \"f24856a3-e206-4a4a-99f1-51e63c93c337\") " pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:44 crc kubenswrapper[4799]: I0121 18:46:44.547882 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgn2r\" (UniqueName: \"kubernetes.io/projected/f24856a3-e206-4a4a-99f1-51e63c93c337-kube-api-access-rgn2r\") pod \"certified-operators-z86g5\" (UID: \"f24856a3-e206-4a4a-99f1-51e63c93c337\") " pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:44 crc kubenswrapper[4799]: I0121 18:46:44.548287 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f24856a3-e206-4a4a-99f1-51e63c93c337-utilities\") pod \"certified-operators-z86g5\" (UID: \"f24856a3-e206-4a4a-99f1-51e63c93c337\") " pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:44 crc kubenswrapper[4799]: I0121 18:46:44.548419 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f24856a3-e206-4a4a-99f1-51e63c93c337-catalog-content\") pod \"certified-operators-z86g5\" (UID: \"f24856a3-e206-4a4a-99f1-51e63c93c337\") " pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:45 crc kubenswrapper[4799]: I0121 18:46:45.096411 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rgn2r\" (UniqueName: \"kubernetes.io/projected/f24856a3-e206-4a4a-99f1-51e63c93c337-kube-api-access-rgn2r\") pod \"certified-operators-z86g5\" (UID: \"f24856a3-e206-4a4a-99f1-51e63c93c337\") " pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:45 crc kubenswrapper[4799]: I0121 18:46:45.374932 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:45 crc kubenswrapper[4799]: I0121 18:46:45.917719 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z86g5"] Jan 21 18:46:46 crc kubenswrapper[4799]: I0121 18:46:46.174058 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z86g5" event={"ID":"f24856a3-e206-4a4a-99f1-51e63c93c337","Type":"ContainerStarted","Data":"675d8f149106dc114e9301860643d2ab4c4abd46c66a3d3f83cef86d3c0be177"} Jan 21 18:46:47 crc kubenswrapper[4799]: I0121 18:46:47.184979 4799 generic.go:334] "Generic (PLEG): container finished" podID="f24856a3-e206-4a4a-99f1-51e63c93c337" containerID="2eb6c93b51949b014ffc658043431ac3503998852280c0c81284a41733f45d68" exitCode=0 Jan 21 18:46:47 crc kubenswrapper[4799]: I0121 18:46:47.185064 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z86g5" event={"ID":"f24856a3-e206-4a4a-99f1-51e63c93c337","Type":"ContainerDied","Data":"2eb6c93b51949b014ffc658043431ac3503998852280c0c81284a41733f45d68"} Jan 21 18:46:47 crc kubenswrapper[4799]: I0121 18:46:47.189149 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:46:49 crc kubenswrapper[4799]: I0121 18:46:49.210057 4799 generic.go:334] "Generic (PLEG): container finished" podID="f24856a3-e206-4a4a-99f1-51e63c93c337" containerID="35f63359ef145072c2ddad30bf1a13aacd7ba09073198c152e2cb3275bc57680" exitCode=0 Jan 21 18:46:49 crc kubenswrapper[4799]: I0121 18:46:49.210591 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z86g5" event={"ID":"f24856a3-e206-4a4a-99f1-51e63c93c337","Type":"ContainerDied","Data":"35f63359ef145072c2ddad30bf1a13aacd7ba09073198c152e2cb3275bc57680"} Jan 21 18:46:50 crc kubenswrapper[4799]: I0121 18:46:50.228665 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z86g5" event={"ID":"f24856a3-e206-4a4a-99f1-51e63c93c337","Type":"ContainerStarted","Data":"5105820c248923efdc3195b8fcdb0ef4ddd6142a5dccce8932ccb72517fc33da"} Jan 21 18:46:50 crc kubenswrapper[4799]: I0121 18:46:50.259149 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-z86g5" podStartSLOduration=3.80808429 podStartE2EDuration="6.259110436s" podCreationTimestamp="2026-01-21 18:46:44 +0000 UTC" firstStartedPulling="2026-01-21 18:46:47.188833702 +0000 UTC m=+4433.815123725" lastFinishedPulling="2026-01-21 18:46:49.639859848 +0000 UTC m=+4436.266149871" observedRunningTime="2026-01-21 18:46:50.254182006 +0000 UTC m=+4436.880472029" watchObservedRunningTime="2026-01-21 18:46:50.259110436 +0000 UTC m=+4436.885400459" Jan 21 18:46:55 crc kubenswrapper[4799]: I0121 18:46:55.375981 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:55 crc kubenswrapper[4799]: I0121 18:46:55.376842 4799 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:56 crc kubenswrapper[4799]: I0121 18:46:56.238409 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:56 crc kubenswrapper[4799]: I0121 18:46:56.329745 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:56 crc kubenswrapper[4799]: I0121 18:46:56.480690 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z86g5"] Jan 21 18:46:58 crc kubenswrapper[4799]: I0121 18:46:58.306848 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-z86g5" podUID="f24856a3-e206-4a4a-99f1-51e63c93c337" containerName="registry-server" containerID="cri-o://5105820c248923efdc3195b8fcdb0ef4ddd6142a5dccce8932ccb72517fc33da" gracePeriod=2 Jan 21 18:46:58 crc kubenswrapper[4799]: I0121 18:46:58.828241 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.020502 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f24856a3-e206-4a4a-99f1-51e63c93c337-utilities\") pod \"f24856a3-e206-4a4a-99f1-51e63c93c337\" (UID: \"f24856a3-e206-4a4a-99f1-51e63c93c337\") " Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.021285 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f24856a3-e206-4a4a-99f1-51e63c93c337-utilities" (OuterVolumeSpecName: "utilities") pod "f24856a3-e206-4a4a-99f1-51e63c93c337" (UID: "f24856a3-e206-4a4a-99f1-51e63c93c337"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.021487 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgn2r\" (UniqueName: \"kubernetes.io/projected/f24856a3-e206-4a4a-99f1-51e63c93c337-kube-api-access-rgn2r\") pod \"f24856a3-e206-4a4a-99f1-51e63c93c337\" (UID: \"f24856a3-e206-4a4a-99f1-51e63c93c337\") " Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.022088 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f24856a3-e206-4a4a-99f1-51e63c93c337-catalog-content\") pod \"f24856a3-e206-4a4a-99f1-51e63c93c337\" (UID: \"f24856a3-e206-4a4a-99f1-51e63c93c337\") " Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.022645 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f24856a3-e206-4a4a-99f1-51e63c93c337-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.030582 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f24856a3-e206-4a4a-99f1-51e63c93c337-kube-api-access-rgn2r" (OuterVolumeSpecName: "kube-api-access-rgn2r") pod "f24856a3-e206-4a4a-99f1-51e63c93c337" (UID: "f24856a3-e206-4a4a-99f1-51e63c93c337"). InnerVolumeSpecName "kube-api-access-rgn2r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.126294 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f24856a3-e206-4a4a-99f1-51e63c93c337-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f24856a3-e206-4a4a-99f1-51e63c93c337" (UID: "f24856a3-e206-4a4a-99f1-51e63c93c337"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.127198 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f24856a3-e206-4a4a-99f1-51e63c93c337-catalog-content\") pod \"f24856a3-e206-4a4a-99f1-51e63c93c337\" (UID: \"f24856a3-e206-4a4a-99f1-51e63c93c337\") " Jan 21 18:46:59 crc kubenswrapper[4799]: W0121 18:46:59.132759 4799 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/f24856a3-e206-4a4a-99f1-51e63c93c337/volumes/kubernetes.io~empty-dir/catalog-content Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.132808 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f24856a3-e206-4a4a-99f1-51e63c93c337-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f24856a3-e206-4a4a-99f1-51e63c93c337" (UID: "f24856a3-e206-4a4a-99f1-51e63c93c337"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.136819 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgn2r\" (UniqueName: \"kubernetes.io/projected/f24856a3-e206-4a4a-99f1-51e63c93c337-kube-api-access-rgn2r\") on node \"crc\" DevicePath \"\"" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.136865 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f24856a3-e206-4a4a-99f1-51e63c93c337-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.320968 4799 generic.go:334] "Generic (PLEG): container finished" podID="f24856a3-e206-4a4a-99f1-51e63c93c337" containerID="5105820c248923efdc3195b8fcdb0ef4ddd6142a5dccce8932ccb72517fc33da" exitCode=0 Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.321048 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z86g5" event={"ID":"f24856a3-e206-4a4a-99f1-51e63c93c337","Type":"ContainerDied","Data":"5105820c248923efdc3195b8fcdb0ef4ddd6142a5dccce8932ccb72517fc33da"} Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.321343 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z86g5" event={"ID":"f24856a3-e206-4a4a-99f1-51e63c93c337","Type":"ContainerDied","Data":"675d8f149106dc114e9301860643d2ab4c4abd46c66a3d3f83cef86d3c0be177"} Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.321376 4799 scope.go:117] "RemoveContainer" containerID="5105820c248923efdc3195b8fcdb0ef4ddd6142a5dccce8932ccb72517fc33da" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.321118 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z86g5" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.345950 4799 scope.go:117] "RemoveContainer" containerID="35f63359ef145072c2ddad30bf1a13aacd7ba09073198c152e2cb3275bc57680" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.373941 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z86g5"] Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.382687 4799 scope.go:117] "RemoveContainer" containerID="2eb6c93b51949b014ffc658043431ac3503998852280c0c81284a41733f45d68" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.383065 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-z86g5"] Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.428741 4799 scope.go:117] "RemoveContainer" containerID="5105820c248923efdc3195b8fcdb0ef4ddd6142a5dccce8932ccb72517fc33da" Jan 21 18:46:59 crc kubenswrapper[4799]: E0121 18:46:59.429393 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5105820c248923efdc3195b8fcdb0ef4ddd6142a5dccce8932ccb72517fc33da\": container with ID starting with 5105820c248923efdc3195b8fcdb0ef4ddd6142a5dccce8932ccb72517fc33da not found: ID does not exist" containerID="5105820c248923efdc3195b8fcdb0ef4ddd6142a5dccce8932ccb72517fc33da" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.429461 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5105820c248923efdc3195b8fcdb0ef4ddd6142a5dccce8932ccb72517fc33da"} err="failed to get container status \"5105820c248923efdc3195b8fcdb0ef4ddd6142a5dccce8932ccb72517fc33da\": rpc error: code = NotFound desc = could not find container \"5105820c248923efdc3195b8fcdb0ef4ddd6142a5dccce8932ccb72517fc33da\": container with ID starting with 5105820c248923efdc3195b8fcdb0ef4ddd6142a5dccce8932ccb72517fc33da not found: ID does not exist" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.429501 4799 scope.go:117] "RemoveContainer" containerID="35f63359ef145072c2ddad30bf1a13aacd7ba09073198c152e2cb3275bc57680" Jan 21 18:46:59 crc kubenswrapper[4799]: E0121 18:46:59.429895 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35f63359ef145072c2ddad30bf1a13aacd7ba09073198c152e2cb3275bc57680\": container with ID starting with 35f63359ef145072c2ddad30bf1a13aacd7ba09073198c152e2cb3275bc57680 not found: ID does not exist" containerID="35f63359ef145072c2ddad30bf1a13aacd7ba09073198c152e2cb3275bc57680" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.429930 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35f63359ef145072c2ddad30bf1a13aacd7ba09073198c152e2cb3275bc57680"} err="failed to get container status \"35f63359ef145072c2ddad30bf1a13aacd7ba09073198c152e2cb3275bc57680\": rpc error: code = NotFound desc = could not find container \"35f63359ef145072c2ddad30bf1a13aacd7ba09073198c152e2cb3275bc57680\": container with ID starting with 35f63359ef145072c2ddad30bf1a13aacd7ba09073198c152e2cb3275bc57680 not found: ID does not exist" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.429952 4799 scope.go:117] "RemoveContainer" containerID="2eb6c93b51949b014ffc658043431ac3503998852280c0c81284a41733f45d68" Jan 21 18:46:59 crc kubenswrapper[4799]: E0121 18:46:59.430251 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2eb6c93b51949b014ffc658043431ac3503998852280c0c81284a41733f45d68\": container with ID starting with 2eb6c93b51949b014ffc658043431ac3503998852280c0c81284a41733f45d68 not found: ID does not exist" containerID="2eb6c93b51949b014ffc658043431ac3503998852280c0c81284a41733f45d68" Jan 21 18:46:59 crc kubenswrapper[4799]: I0121 18:46:59.430302 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2eb6c93b51949b014ffc658043431ac3503998852280c0c81284a41733f45d68"} err="failed to get container status \"2eb6c93b51949b014ffc658043431ac3503998852280c0c81284a41733f45d68\": rpc error: code = NotFound desc = could not find container \"2eb6c93b51949b014ffc658043431ac3503998852280c0c81284a41733f45d68\": container with ID starting with 2eb6c93b51949b014ffc658043431ac3503998852280c0c81284a41733f45d68 not found: ID does not exist" Jan 21 18:47:00 crc kubenswrapper[4799]: I0121 18:47:00.219289 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f24856a3-e206-4a4a-99f1-51e63c93c337" path="/var/lib/kubelet/pods/f24856a3-e206-4a4a-99f1-51e63c93c337/volumes" Jan 21 18:48:21 crc kubenswrapper[4799]: E0121 18:48:21.888237 4799 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.177:53698->38.102.83.177:39551: read tcp 38.102.83.177:53698->38.102.83.177:39551: read: connection reset by peer Jan 21 18:48:25 crc kubenswrapper[4799]: I0121 18:48:25.970653 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:48:25 crc kubenswrapper[4799]: I0121 18:48:25.971425 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:48:55 crc kubenswrapper[4799]: I0121 18:48:55.970713 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:48:55 crc kubenswrapper[4799]: I0121 18:48:55.971344 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:49:25 crc kubenswrapper[4799]: I0121 18:49:25.970834 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:49:25 crc kubenswrapper[4799]: I0121 18:49:25.971489 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:49:25 crc kubenswrapper[4799]: I0121 18:49:25.971552 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 18:49:25 crc kubenswrapper[4799]: I0121 18:49:25.972610 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:49:25 crc kubenswrapper[4799]: I0121 18:49:25.972672 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" gracePeriod=600 Jan 21 18:49:26 crc kubenswrapper[4799]: E0121 18:49:26.108067 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:49:26 crc kubenswrapper[4799]: I0121 18:49:26.816079 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" exitCode=0 Jan 21 18:49:26 crc kubenswrapper[4799]: I0121 18:49:26.816147 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c"} Jan 21 18:49:26 crc kubenswrapper[4799]: I0121 18:49:26.816205 4799 scope.go:117] "RemoveContainer" containerID="39d95bbe738803d01806d4afe6a550161dbadf488cd50cb769dab361a07d378f" Jan 21 18:49:26 crc kubenswrapper[4799]: I0121 18:49:26.817062 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:49:26 crc kubenswrapper[4799]: E0121 18:49:26.817391 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:49:39 crc kubenswrapper[4799]: I0121 18:49:39.207524 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:49:39 crc kubenswrapper[4799]: E0121 18:49:39.208646 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:49:53 crc kubenswrapper[4799]: I0121 18:49:53.205484 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:49:53 crc kubenswrapper[4799]: E0121 18:49:53.207290 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:50:07 crc kubenswrapper[4799]: I0121 18:50:07.206276 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:50:07 crc kubenswrapper[4799]: E0121 18:50:07.207444 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:50:19 crc kubenswrapper[4799]: I0121 18:50:19.205474 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:50:19 crc kubenswrapper[4799]: E0121 18:50:19.207153 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:50:30 crc kubenswrapper[4799]: I0121 18:50:30.206165 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:50:30 crc kubenswrapper[4799]: E0121 18:50:30.207147 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.128075 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6nklg"] Jan 21 18:50:38 crc kubenswrapper[4799]: E0121 18:50:38.129366 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f24856a3-e206-4a4a-99f1-51e63c93c337" containerName="extract-content" Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.129386 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f24856a3-e206-4a4a-99f1-51e63c93c337" containerName="extract-content" Jan 21 18:50:38 crc kubenswrapper[4799]: E0121 
18:50:38.129407 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f24856a3-e206-4a4a-99f1-51e63c93c337" containerName="registry-server" Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.129416 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f24856a3-e206-4a4a-99f1-51e63c93c337" containerName="registry-server" Jan 21 18:50:38 crc kubenswrapper[4799]: E0121 18:50:38.129438 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f24856a3-e206-4a4a-99f1-51e63c93c337" containerName="extract-utilities" Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.129447 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f24856a3-e206-4a4a-99f1-51e63c93c337" containerName="extract-utilities" Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.129719 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f24856a3-e206-4a4a-99f1-51e63c93c337" containerName="registry-server" Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.133065 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.152228 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nc956\" (UniqueName: \"kubernetes.io/projected/a832845b-cdec-4384-a44e-1db092166b74-kube-api-access-nc956\") pod \"redhat-marketplace-6nklg\" (UID: \"a832845b-cdec-4384-a44e-1db092166b74\") " pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.152640 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a832845b-cdec-4384-a44e-1db092166b74-utilities\") pod \"redhat-marketplace-6nklg\" (UID: \"a832845b-cdec-4384-a44e-1db092166b74\") " pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.152698 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a832845b-cdec-4384-a44e-1db092166b74-catalog-content\") pod \"redhat-marketplace-6nklg\" (UID: \"a832845b-cdec-4384-a44e-1db092166b74\") " pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.165300 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6nklg"] Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.254249 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nc956\" (UniqueName: \"kubernetes.io/projected/a832845b-cdec-4384-a44e-1db092166b74-kube-api-access-nc956\") pod \"redhat-marketplace-6nklg\" (UID: \"a832845b-cdec-4384-a44e-1db092166b74\") " pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.254319 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a832845b-cdec-4384-a44e-1db092166b74-utilities\") pod \"redhat-marketplace-6nklg\" (UID: \"a832845b-cdec-4384-a44e-1db092166b74\") " pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.254666 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/a832845b-cdec-4384-a44e-1db092166b74-catalog-content\") pod \"redhat-marketplace-6nklg\" (UID: \"a832845b-cdec-4384-a44e-1db092166b74\") " pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.255031 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a832845b-cdec-4384-a44e-1db092166b74-utilities\") pod \"redhat-marketplace-6nklg\" (UID: \"a832845b-cdec-4384-a44e-1db092166b74\") " pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.255233 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a832845b-cdec-4384-a44e-1db092166b74-catalog-content\") pod \"redhat-marketplace-6nklg\" (UID: \"a832845b-cdec-4384-a44e-1db092166b74\") " pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.292518 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nc956\" (UniqueName: \"kubernetes.io/projected/a832845b-cdec-4384-a44e-1db092166b74-kube-api-access-nc956\") pod \"redhat-marketplace-6nklg\" (UID: \"a832845b-cdec-4384-a44e-1db092166b74\") " pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:38 crc kubenswrapper[4799]: I0121 18:50:38.467373 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:39 crc kubenswrapper[4799]: I0121 18:50:39.038762 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6nklg"] Jan 21 18:50:39 crc kubenswrapper[4799]: W0121 18:50:39.053011 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda832845b_cdec_4384_a44e_1db092166b74.slice/crio-47d997c0425c44c74c8f61f941745038fe43d4a61fce767292fbf35a04140313 WatchSource:0}: Error finding container 47d997c0425c44c74c8f61f941745038fe43d4a61fce767292fbf35a04140313: Status 404 returned error can't find the container with id 47d997c0425c44c74c8f61f941745038fe43d4a61fce767292fbf35a04140313 Jan 21 18:50:39 crc kubenswrapper[4799]: I0121 18:50:39.602539 4799 generic.go:334] "Generic (PLEG): container finished" podID="a832845b-cdec-4384-a44e-1db092166b74" containerID="2e1487ee5d6678553d30d89bfa7bc2493d9b1a9415a491c95240de3a0dc606af" exitCode=0 Jan 21 18:50:39 crc kubenswrapper[4799]: I0121 18:50:39.602719 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6nklg" event={"ID":"a832845b-cdec-4384-a44e-1db092166b74","Type":"ContainerDied","Data":"2e1487ee5d6678553d30d89bfa7bc2493d9b1a9415a491c95240de3a0dc606af"} Jan 21 18:50:39 crc kubenswrapper[4799]: I0121 18:50:39.602838 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6nklg" event={"ID":"a832845b-cdec-4384-a44e-1db092166b74","Type":"ContainerStarted","Data":"47d997c0425c44c74c8f61f941745038fe43d4a61fce767292fbf35a04140313"} Jan 21 18:50:40 crc kubenswrapper[4799]: I0121 18:50:40.617101 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6nklg" event={"ID":"a832845b-cdec-4384-a44e-1db092166b74","Type":"ContainerStarted","Data":"34d99ade10c8cdd830ae1ef50fa88612ac9d7362074a0692e2b0f96f0d8bc922"} Jan 21 18:50:41 crc kubenswrapper[4799]: I0121 
18:50:41.628734 4799 generic.go:334] "Generic (PLEG): container finished" podID="a832845b-cdec-4384-a44e-1db092166b74" containerID="34d99ade10c8cdd830ae1ef50fa88612ac9d7362074a0692e2b0f96f0d8bc922" exitCode=0 Jan 21 18:50:41 crc kubenswrapper[4799]: I0121 18:50:41.628875 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6nklg" event={"ID":"a832845b-cdec-4384-a44e-1db092166b74","Type":"ContainerDied","Data":"34d99ade10c8cdd830ae1ef50fa88612ac9d7362074a0692e2b0f96f0d8bc922"} Jan 21 18:50:42 crc kubenswrapper[4799]: I0121 18:50:42.641029 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6nklg" event={"ID":"a832845b-cdec-4384-a44e-1db092166b74","Type":"ContainerStarted","Data":"74da404e5d840352b16e2e336d95c518bef045480ccd31feafb275e088abd5ee"} Jan 21 18:50:42 crc kubenswrapper[4799]: I0121 18:50:42.664763 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6nklg" podStartSLOduration=2.20236073 podStartE2EDuration="4.664739977s" podCreationTimestamp="2026-01-21 18:50:38 +0000 UTC" firstStartedPulling="2026-01-21 18:50:39.606767593 +0000 UTC m=+4666.233057616" lastFinishedPulling="2026-01-21 18:50:42.06914684 +0000 UTC m=+4668.695436863" observedRunningTime="2026-01-21 18:50:42.656779231 +0000 UTC m=+4669.283069264" watchObservedRunningTime="2026-01-21 18:50:42.664739977 +0000 UTC m=+4669.291030000" Jan 21 18:50:43 crc kubenswrapper[4799]: I0121 18:50:43.205538 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:50:43 crc kubenswrapper[4799]: E0121 18:50:43.206036 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:50:48 crc kubenswrapper[4799]: I0121 18:50:48.468612 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:48 crc kubenswrapper[4799]: I0121 18:50:48.470735 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:48 crc kubenswrapper[4799]: I0121 18:50:48.535889 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:48 crc kubenswrapper[4799]: I0121 18:50:48.784401 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:48 crc kubenswrapper[4799]: I0121 18:50:48.838838 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6nklg"] Jan 21 18:50:50 crc kubenswrapper[4799]: I0121 18:50:50.725540 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6nklg" podUID="a832845b-cdec-4384-a44e-1db092166b74" containerName="registry-server" containerID="cri-o://74da404e5d840352b16e2e336d95c518bef045480ccd31feafb275e088abd5ee" gracePeriod=2 Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.180503 4799 util.go:48] "No ready sandbox 
for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.379508 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nc956\" (UniqueName: \"kubernetes.io/projected/a832845b-cdec-4384-a44e-1db092166b74-kube-api-access-nc956\") pod \"a832845b-cdec-4384-a44e-1db092166b74\" (UID: \"a832845b-cdec-4384-a44e-1db092166b74\") " Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.380003 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a832845b-cdec-4384-a44e-1db092166b74-catalog-content\") pod \"a832845b-cdec-4384-a44e-1db092166b74\" (UID: \"a832845b-cdec-4384-a44e-1db092166b74\") " Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.380084 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a832845b-cdec-4384-a44e-1db092166b74-utilities\") pod \"a832845b-cdec-4384-a44e-1db092166b74\" (UID: \"a832845b-cdec-4384-a44e-1db092166b74\") " Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.380897 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a832845b-cdec-4384-a44e-1db092166b74-utilities" (OuterVolumeSpecName: "utilities") pod "a832845b-cdec-4384-a44e-1db092166b74" (UID: "a832845b-cdec-4384-a44e-1db092166b74"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.381017 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a832845b-cdec-4384-a44e-1db092166b74-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.387604 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a832845b-cdec-4384-a44e-1db092166b74-kube-api-access-nc956" (OuterVolumeSpecName: "kube-api-access-nc956") pod "a832845b-cdec-4384-a44e-1db092166b74" (UID: "a832845b-cdec-4384-a44e-1db092166b74"). InnerVolumeSpecName "kube-api-access-nc956". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.402504 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a832845b-cdec-4384-a44e-1db092166b74-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a832845b-cdec-4384-a44e-1db092166b74" (UID: "a832845b-cdec-4384-a44e-1db092166b74"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.484830 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nc956\" (UniqueName: \"kubernetes.io/projected/a832845b-cdec-4384-a44e-1db092166b74-kube-api-access-nc956\") on node \"crc\" DevicePath \"\"" Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.484902 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a832845b-cdec-4384-a44e-1db092166b74-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.743351 4799 generic.go:334] "Generic (PLEG): container finished" podID="a832845b-cdec-4384-a44e-1db092166b74" containerID="74da404e5d840352b16e2e336d95c518bef045480ccd31feafb275e088abd5ee" exitCode=0 Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.743412 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6nklg" event={"ID":"a832845b-cdec-4384-a44e-1db092166b74","Type":"ContainerDied","Data":"74da404e5d840352b16e2e336d95c518bef045480ccd31feafb275e088abd5ee"} Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.744357 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6nklg" event={"ID":"a832845b-cdec-4384-a44e-1db092166b74","Type":"ContainerDied","Data":"47d997c0425c44c74c8f61f941745038fe43d4a61fce767292fbf35a04140313"} Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.743442 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6nklg" Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.744424 4799 scope.go:117] "RemoveContainer" containerID="74da404e5d840352b16e2e336d95c518bef045480ccd31feafb275e088abd5ee" Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.775090 4799 scope.go:117] "RemoveContainer" containerID="34d99ade10c8cdd830ae1ef50fa88612ac9d7362074a0692e2b0f96f0d8bc922" Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.788600 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6nklg"] Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.799821 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6nklg"] Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.809604 4799 scope.go:117] "RemoveContainer" containerID="2e1487ee5d6678553d30d89bfa7bc2493d9b1a9415a491c95240de3a0dc606af" Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.859164 4799 scope.go:117] "RemoveContainer" containerID="74da404e5d840352b16e2e336d95c518bef045480ccd31feafb275e088abd5ee" Jan 21 18:50:51 crc kubenswrapper[4799]: E0121 18:50:51.859820 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74da404e5d840352b16e2e336d95c518bef045480ccd31feafb275e088abd5ee\": container with ID starting with 74da404e5d840352b16e2e336d95c518bef045480ccd31feafb275e088abd5ee not found: ID does not exist" containerID="74da404e5d840352b16e2e336d95c518bef045480ccd31feafb275e088abd5ee" Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.859853 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74da404e5d840352b16e2e336d95c518bef045480ccd31feafb275e088abd5ee"} err="failed to get container status 
\"74da404e5d840352b16e2e336d95c518bef045480ccd31feafb275e088abd5ee\": rpc error: code = NotFound desc = could not find container \"74da404e5d840352b16e2e336d95c518bef045480ccd31feafb275e088abd5ee\": container with ID starting with 74da404e5d840352b16e2e336d95c518bef045480ccd31feafb275e088abd5ee not found: ID does not exist" Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.859890 4799 scope.go:117] "RemoveContainer" containerID="34d99ade10c8cdd830ae1ef50fa88612ac9d7362074a0692e2b0f96f0d8bc922" Jan 21 18:50:51 crc kubenswrapper[4799]: E0121 18:50:51.860378 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34d99ade10c8cdd830ae1ef50fa88612ac9d7362074a0692e2b0f96f0d8bc922\": container with ID starting with 34d99ade10c8cdd830ae1ef50fa88612ac9d7362074a0692e2b0f96f0d8bc922 not found: ID does not exist" containerID="34d99ade10c8cdd830ae1ef50fa88612ac9d7362074a0692e2b0f96f0d8bc922" Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.860429 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34d99ade10c8cdd830ae1ef50fa88612ac9d7362074a0692e2b0f96f0d8bc922"} err="failed to get container status \"34d99ade10c8cdd830ae1ef50fa88612ac9d7362074a0692e2b0f96f0d8bc922\": rpc error: code = NotFound desc = could not find container \"34d99ade10c8cdd830ae1ef50fa88612ac9d7362074a0692e2b0f96f0d8bc922\": container with ID starting with 34d99ade10c8cdd830ae1ef50fa88612ac9d7362074a0692e2b0f96f0d8bc922 not found: ID does not exist" Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.860465 4799 scope.go:117] "RemoveContainer" containerID="2e1487ee5d6678553d30d89bfa7bc2493d9b1a9415a491c95240de3a0dc606af" Jan 21 18:50:51 crc kubenswrapper[4799]: E0121 18:50:51.863724 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e1487ee5d6678553d30d89bfa7bc2493d9b1a9415a491c95240de3a0dc606af\": container with ID starting with 2e1487ee5d6678553d30d89bfa7bc2493d9b1a9415a491c95240de3a0dc606af not found: ID does not exist" containerID="2e1487ee5d6678553d30d89bfa7bc2493d9b1a9415a491c95240de3a0dc606af" Jan 21 18:50:51 crc kubenswrapper[4799]: I0121 18:50:51.863791 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e1487ee5d6678553d30d89bfa7bc2493d9b1a9415a491c95240de3a0dc606af"} err="failed to get container status \"2e1487ee5d6678553d30d89bfa7bc2493d9b1a9415a491c95240de3a0dc606af\": rpc error: code = NotFound desc = could not find container \"2e1487ee5d6678553d30d89bfa7bc2493d9b1a9415a491c95240de3a0dc606af\": container with ID starting with 2e1487ee5d6678553d30d89bfa7bc2493d9b1a9415a491c95240de3a0dc606af not found: ID does not exist" Jan 21 18:50:52 crc kubenswrapper[4799]: I0121 18:50:52.217426 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a832845b-cdec-4384-a44e-1db092166b74" path="/var/lib/kubelet/pods/a832845b-cdec-4384-a44e-1db092166b74/volumes" Jan 21 18:50:58 crc kubenswrapper[4799]: I0121 18:50:58.206441 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:50:58 crc kubenswrapper[4799]: E0121 18:50:58.208348 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:51:09 crc kubenswrapper[4799]: I0121 18:51:09.205672 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:51:09 crc kubenswrapper[4799]: E0121 18:51:09.206514 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:51:21 crc kubenswrapper[4799]: I0121 18:51:21.206329 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:51:21 crc kubenswrapper[4799]: E0121 18:51:21.207558 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:51:33 crc kubenswrapper[4799]: I0121 18:51:33.205323 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:51:33 crc kubenswrapper[4799]: E0121 18:51:33.206111 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:51:47 crc kubenswrapper[4799]: I0121 18:51:47.205442 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:51:47 crc kubenswrapper[4799]: E0121 18:51:47.207004 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:51:58 crc kubenswrapper[4799]: I0121 18:51:58.205898 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:51:58 crc kubenswrapper[4799]: E0121 18:51:58.206959 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" 
podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:52:10 crc kubenswrapper[4799]: I0121 18:52:10.205927 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:52:10 crc kubenswrapper[4799]: E0121 18:52:10.206725 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:52:17 crc kubenswrapper[4799]: I0121 18:52:17.941697 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hcj5r"] Jan 21 18:52:17 crc kubenswrapper[4799]: E0121 18:52:17.942797 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a832845b-cdec-4384-a44e-1db092166b74" containerName="registry-server" Jan 21 18:52:17 crc kubenswrapper[4799]: I0121 18:52:17.942818 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a832845b-cdec-4384-a44e-1db092166b74" containerName="registry-server" Jan 21 18:52:17 crc kubenswrapper[4799]: E0121 18:52:17.942830 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a832845b-cdec-4384-a44e-1db092166b74" containerName="extract-content" Jan 21 18:52:17 crc kubenswrapper[4799]: I0121 18:52:17.942838 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a832845b-cdec-4384-a44e-1db092166b74" containerName="extract-content" Jan 21 18:52:17 crc kubenswrapper[4799]: E0121 18:52:17.942878 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a832845b-cdec-4384-a44e-1db092166b74" containerName="extract-utilities" Jan 21 18:52:17 crc kubenswrapper[4799]: I0121 18:52:17.942888 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a832845b-cdec-4384-a44e-1db092166b74" containerName="extract-utilities" Jan 21 18:52:17 crc kubenswrapper[4799]: I0121 18:52:17.943219 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a832845b-cdec-4384-a44e-1db092166b74" containerName="registry-server" Jan 21 18:52:17 crc kubenswrapper[4799]: I0121 18:52:17.947806 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:17 crc kubenswrapper[4799]: I0121 18:52:17.958507 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hcj5r"] Jan 21 18:52:18 crc kubenswrapper[4799]: I0121 18:52:18.101903 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lckhl\" (UniqueName: \"kubernetes.io/projected/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-kube-api-access-lckhl\") pod \"redhat-operators-hcj5r\" (UID: \"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b\") " pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:18 crc kubenswrapper[4799]: I0121 18:52:18.102020 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-catalog-content\") pod \"redhat-operators-hcj5r\" (UID: \"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b\") " pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:18 crc kubenswrapper[4799]: I0121 18:52:18.102492 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-utilities\") pod \"redhat-operators-hcj5r\" (UID: \"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b\") " pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:18 crc kubenswrapper[4799]: I0121 18:52:18.204272 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-catalog-content\") pod \"redhat-operators-hcj5r\" (UID: \"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b\") " pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:18 crc kubenswrapper[4799]: I0121 18:52:18.204478 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-utilities\") pod \"redhat-operators-hcj5r\" (UID: \"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b\") " pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:18 crc kubenswrapper[4799]: I0121 18:52:18.204598 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lckhl\" (UniqueName: \"kubernetes.io/projected/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-kube-api-access-lckhl\") pod \"redhat-operators-hcj5r\" (UID: \"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b\") " pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:18 crc kubenswrapper[4799]: I0121 18:52:18.204923 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-catalog-content\") pod \"redhat-operators-hcj5r\" (UID: \"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b\") " pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:18 crc kubenswrapper[4799]: I0121 18:52:18.204933 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-utilities\") pod \"redhat-operators-hcj5r\" (UID: \"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b\") " pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:18 crc kubenswrapper[4799]: I0121 18:52:18.238609 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-lckhl\" (UniqueName: \"kubernetes.io/projected/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-kube-api-access-lckhl\") pod \"redhat-operators-hcj5r\" (UID: \"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b\") " pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:18 crc kubenswrapper[4799]: I0121 18:52:18.273610 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:18 crc kubenswrapper[4799]: I0121 18:52:18.800358 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hcj5r"] Jan 21 18:52:19 crc kubenswrapper[4799]: W0121 18:52:19.398198 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod201e1a5f_221e_4ba7_993c_fc8ffd43bc9b.slice/crio-33e58d6ce175b406054988eec35359b9a2dd8fff44c756731785ae1656fa965b WatchSource:0}: Error finding container 33e58d6ce175b406054988eec35359b9a2dd8fff44c756731785ae1656fa965b: Status 404 returned error can't find the container with id 33e58d6ce175b406054988eec35359b9a2dd8fff44c756731785ae1656fa965b Jan 21 18:52:19 crc kubenswrapper[4799]: I0121 18:52:19.821737 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hcj5r" event={"ID":"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b","Type":"ContainerStarted","Data":"33e58d6ce175b406054988eec35359b9a2dd8fff44c756731785ae1656fa965b"} Jan 21 18:52:20 crc kubenswrapper[4799]: I0121 18:52:20.833634 4799 generic.go:334] "Generic (PLEG): container finished" podID="201e1a5f-221e-4ba7-993c-fc8ffd43bc9b" containerID="98eaa63ce01066463a6a867e84cd100b1c918b7eed2f0a1afa12828de2447c61" exitCode=0 Jan 21 18:52:20 crc kubenswrapper[4799]: I0121 18:52:20.833689 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hcj5r" event={"ID":"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b","Type":"ContainerDied","Data":"98eaa63ce01066463a6a867e84cd100b1c918b7eed2f0a1afa12828de2447c61"} Jan 21 18:52:20 crc kubenswrapper[4799]: I0121 18:52:20.836661 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:52:22 crc kubenswrapper[4799]: I0121 18:52:22.856425 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hcj5r" event={"ID":"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b","Type":"ContainerStarted","Data":"d90e9bd6f187fd6b0ac36f75e34c5bb75d7943e6de0d36ce8a7ccde024b616c1"} Jan 21 18:52:25 crc kubenswrapper[4799]: I0121 18:52:25.298316 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:52:25 crc kubenswrapper[4799]: E0121 18:52:25.298822 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:52:25 crc kubenswrapper[4799]: I0121 18:52:25.914258 4799 generic.go:334] "Generic (PLEG): container finished" podID="201e1a5f-221e-4ba7-993c-fc8ffd43bc9b" containerID="d90e9bd6f187fd6b0ac36f75e34c5bb75d7943e6de0d36ce8a7ccde024b616c1" exitCode=0 Jan 21 18:52:25 crc kubenswrapper[4799]: I0121 18:52:25.914317 4799 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hcj5r" event={"ID":"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b","Type":"ContainerDied","Data":"d90e9bd6f187fd6b0ac36f75e34c5bb75d7943e6de0d36ce8a7ccde024b616c1"} Jan 21 18:52:26 crc kubenswrapper[4799]: I0121 18:52:26.930156 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hcj5r" event={"ID":"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b","Type":"ContainerStarted","Data":"9700d371a53761bd098f7caaf6e312d8c6ef0ab3fb25c979f92ac3865ed0544e"} Jan 21 18:52:26 crc kubenswrapper[4799]: I0121 18:52:26.960983 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hcj5r" podStartSLOduration=4.20540444 podStartE2EDuration="9.960957213s" podCreationTimestamp="2026-01-21 18:52:17 +0000 UTC" firstStartedPulling="2026-01-21 18:52:20.836427464 +0000 UTC m=+4767.462717487" lastFinishedPulling="2026-01-21 18:52:26.591980237 +0000 UTC m=+4773.218270260" observedRunningTime="2026-01-21 18:52:26.955596732 +0000 UTC m=+4773.581886785" watchObservedRunningTime="2026-01-21 18:52:26.960957213 +0000 UTC m=+4773.587247246" Jan 21 18:52:28 crc kubenswrapper[4799]: I0121 18:52:28.274168 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:28 crc kubenswrapper[4799]: I0121 18:52:28.274565 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:29 crc kubenswrapper[4799]: I0121 18:52:29.325323 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hcj5r" podUID="201e1a5f-221e-4ba7-993c-fc8ffd43bc9b" containerName="registry-server" probeResult="failure" output=< Jan 21 18:52:29 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Jan 21 18:52:29 crc kubenswrapper[4799]: > Jan 21 18:52:38 crc kubenswrapper[4799]: I0121 18:52:38.204989 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:52:38 crc kubenswrapper[4799]: E0121 18:52:38.205713 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:52:38 crc kubenswrapper[4799]: I0121 18:52:38.338909 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:38 crc kubenswrapper[4799]: I0121 18:52:38.394326 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:38 crc kubenswrapper[4799]: I0121 18:52:38.578240 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hcj5r"] Jan 21 18:52:40 crc kubenswrapper[4799]: I0121 18:52:40.084289 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hcj5r" podUID="201e1a5f-221e-4ba7-993c-fc8ffd43bc9b" containerName="registry-server" containerID="cri-o://9700d371a53761bd098f7caaf6e312d8c6ef0ab3fb25c979f92ac3865ed0544e" 
gracePeriod=2 Jan 21 18:52:41 crc kubenswrapper[4799]: I0121 18:52:41.096295 4799 generic.go:334] "Generic (PLEG): container finished" podID="201e1a5f-221e-4ba7-993c-fc8ffd43bc9b" containerID="9700d371a53761bd098f7caaf6e312d8c6ef0ab3fb25c979f92ac3865ed0544e" exitCode=0 Jan 21 18:52:41 crc kubenswrapper[4799]: I0121 18:52:41.096556 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hcj5r" event={"ID":"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b","Type":"ContainerDied","Data":"9700d371a53761bd098f7caaf6e312d8c6ef0ab3fb25c979f92ac3865ed0544e"} Jan 21 18:52:41 crc kubenswrapper[4799]: I0121 18:52:41.223689 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:41 crc kubenswrapper[4799]: I0121 18:52:41.333121 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-utilities\") pod \"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b\" (UID: \"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b\") " Jan 21 18:52:41 crc kubenswrapper[4799]: I0121 18:52:41.333501 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lckhl\" (UniqueName: \"kubernetes.io/projected/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-kube-api-access-lckhl\") pod \"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b\" (UID: \"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b\") " Jan 21 18:52:41 crc kubenswrapper[4799]: I0121 18:52:41.334051 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-utilities" (OuterVolumeSpecName: "utilities") pod "201e1a5f-221e-4ba7-993c-fc8ffd43bc9b" (UID: "201e1a5f-221e-4ba7-993c-fc8ffd43bc9b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:52:41 crc kubenswrapper[4799]: I0121 18:52:41.334274 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-catalog-content\") pod \"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b\" (UID: \"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b\") " Jan 21 18:52:41 crc kubenswrapper[4799]: I0121 18:52:41.335854 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:52:41 crc kubenswrapper[4799]: I0121 18:52:41.339450 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-kube-api-access-lckhl" (OuterVolumeSpecName: "kube-api-access-lckhl") pod "201e1a5f-221e-4ba7-993c-fc8ffd43bc9b" (UID: "201e1a5f-221e-4ba7-993c-fc8ffd43bc9b"). InnerVolumeSpecName "kube-api-access-lckhl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:52:41 crc kubenswrapper[4799]: I0121 18:52:41.438112 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lckhl\" (UniqueName: \"kubernetes.io/projected/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-kube-api-access-lckhl\") on node \"crc\" DevicePath \"\"" Jan 21 18:52:41 crc kubenswrapper[4799]: I0121 18:52:41.458801 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "201e1a5f-221e-4ba7-993c-fc8ffd43bc9b" (UID: "201e1a5f-221e-4ba7-993c-fc8ffd43bc9b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:52:41 crc kubenswrapper[4799]: I0121 18:52:41.540786 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:52:42 crc kubenswrapper[4799]: I0121 18:52:42.109070 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hcj5r" event={"ID":"201e1a5f-221e-4ba7-993c-fc8ffd43bc9b","Type":"ContainerDied","Data":"33e58d6ce175b406054988eec35359b9a2dd8fff44c756731785ae1656fa965b"} Jan 21 18:52:42 crc kubenswrapper[4799]: I0121 18:52:42.109220 4799 scope.go:117] "RemoveContainer" containerID="9700d371a53761bd098f7caaf6e312d8c6ef0ab3fb25c979f92ac3865ed0544e" Jan 21 18:52:42 crc kubenswrapper[4799]: I0121 18:52:42.109394 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hcj5r" Jan 21 18:52:42 crc kubenswrapper[4799]: I0121 18:52:42.141073 4799 scope.go:117] "RemoveContainer" containerID="d90e9bd6f187fd6b0ac36f75e34c5bb75d7943e6de0d36ce8a7ccde024b616c1" Jan 21 18:52:42 crc kubenswrapper[4799]: I0121 18:52:42.177170 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hcj5r"] Jan 21 18:52:42 crc kubenswrapper[4799]: I0121 18:52:42.185434 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hcj5r"] Jan 21 18:52:42 crc kubenswrapper[4799]: I0121 18:52:42.190197 4799 scope.go:117] "RemoveContainer" containerID="98eaa63ce01066463a6a867e84cd100b1c918b7eed2f0a1afa12828de2447c61" Jan 21 18:52:42 crc kubenswrapper[4799]: I0121 18:52:42.220269 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="201e1a5f-221e-4ba7-993c-fc8ffd43bc9b" path="/var/lib/kubelet/pods/201e1a5f-221e-4ba7-993c-fc8ffd43bc9b/volumes" Jan 21 18:52:52 crc kubenswrapper[4799]: I0121 18:52:52.206427 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:52:52 crc kubenswrapper[4799]: E0121 18:52:52.207291 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:53:03 crc kubenswrapper[4799]: I0121 18:53:03.205503 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 
18:53:03 crc kubenswrapper[4799]: E0121 18:53:03.206459 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:53:17 crc kubenswrapper[4799]: I0121 18:53:17.206024 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:53:17 crc kubenswrapper[4799]: E0121 18:53:17.207750 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:53:32 crc kubenswrapper[4799]: I0121 18:53:32.205058 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:53:32 crc kubenswrapper[4799]: E0121 18:53:32.205829 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:53:46 crc kubenswrapper[4799]: I0121 18:53:46.205296 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:53:46 crc kubenswrapper[4799]: E0121 18:53:46.206147 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:53:59 crc kubenswrapper[4799]: I0121 18:53:59.223272 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:53:59 crc kubenswrapper[4799]: E0121 18:53:59.224582 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:54:10 crc kubenswrapper[4799]: I0121 18:54:10.211819 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:54:10 crc kubenswrapper[4799]: E0121 18:54:10.212672 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:54:21 crc kubenswrapper[4799]: I0121 18:54:21.205725 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:54:21 crc kubenswrapper[4799]: E0121 18:54:21.206515 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 18:54:33 crc kubenswrapper[4799]: I0121 18:54:33.206042 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:54:34 crc kubenswrapper[4799]: I0121 18:54:34.258513 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"ea14aa4ba04a0d2455a9611b11a13b3eb56975233f138e469cb13f8c18deacd0"} Jan 21 18:55:30 crc kubenswrapper[4799]: I0121 18:55:30.632865 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-749b6794b5-k8rw7" podUID="f7542699-9beb-4966-b1e4-b3c3cb9b42ff" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.235110 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-j92cx"] Jan 21 18:56:07 crc kubenswrapper[4799]: E0121 18:56:07.236287 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="201e1a5f-221e-4ba7-993c-fc8ffd43bc9b" containerName="registry-server" Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.236303 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="201e1a5f-221e-4ba7-993c-fc8ffd43bc9b" containerName="registry-server" Jan 21 18:56:07 crc kubenswrapper[4799]: E0121 18:56:07.236337 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="201e1a5f-221e-4ba7-993c-fc8ffd43bc9b" containerName="extract-utilities" Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.236345 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="201e1a5f-221e-4ba7-993c-fc8ffd43bc9b" containerName="extract-utilities" Jan 21 18:56:07 crc kubenswrapper[4799]: E0121 18:56:07.236361 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="201e1a5f-221e-4ba7-993c-fc8ffd43bc9b" containerName="extract-content" Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.236370 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="201e1a5f-221e-4ba7-993c-fc8ffd43bc9b" containerName="extract-content" Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.236638 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="201e1a5f-221e-4ba7-993c-fc8ffd43bc9b" containerName="registry-server" Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.238660 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.247242 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j92cx"] Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.397431 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-catalog-content\") pod \"community-operators-j92cx\" (UID: \"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8\") " pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.397498 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mhwr\" (UniqueName: \"kubernetes.io/projected/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-kube-api-access-7mhwr\") pod \"community-operators-j92cx\" (UID: \"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8\") " pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.397754 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-utilities\") pod \"community-operators-j92cx\" (UID: \"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8\") " pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.500167 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-catalog-content\") pod \"community-operators-j92cx\" (UID: \"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8\") " pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.500483 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mhwr\" (UniqueName: \"kubernetes.io/projected/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-kube-api-access-7mhwr\") pod \"community-operators-j92cx\" (UID: \"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8\") " pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.500629 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-utilities\") pod \"community-operators-j92cx\" (UID: \"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8\") " pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.500698 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-catalog-content\") pod \"community-operators-j92cx\" (UID: \"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8\") " pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.501170 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-utilities\") pod \"community-operators-j92cx\" (UID: \"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8\") " pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.607034 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7mhwr\" (UniqueName: \"kubernetes.io/projected/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-kube-api-access-7mhwr\") pod \"community-operators-j92cx\" (UID: \"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8\") " pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:07 crc kubenswrapper[4799]: I0121 18:56:07.876121 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:08 crc kubenswrapper[4799]: I0121 18:56:08.548168 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j92cx"] Jan 21 18:56:09 crc kubenswrapper[4799]: I0121 18:56:09.212765 4799 generic.go:334] "Generic (PLEG): container finished" podID="53e819ed-cc0e-4d4c-826f-949d5a7cd1e8" containerID="1b13248da0a68821130a11484f605a4e04fd19e9a95ef44708564c5792f89033" exitCode=0 Jan 21 18:56:09 crc kubenswrapper[4799]: I0121 18:56:09.213004 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j92cx" event={"ID":"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8","Type":"ContainerDied","Data":"1b13248da0a68821130a11484f605a4e04fd19e9a95ef44708564c5792f89033"} Jan 21 18:56:09 crc kubenswrapper[4799]: I0121 18:56:09.213346 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j92cx" event={"ID":"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8","Type":"ContainerStarted","Data":"3366fe3589e2593005252e813553123f3154aafbb4a7e5b119caf787e66fffe0"} Jan 21 18:56:11 crc kubenswrapper[4799]: I0121 18:56:11.235672 4799 generic.go:334] "Generic (PLEG): container finished" podID="53e819ed-cc0e-4d4c-826f-949d5a7cd1e8" containerID="40d64d2161c17bf7a33b70cb00827680c3714d04c045991dfb65932e8d929b17" exitCode=0 Jan 21 18:56:11 crc kubenswrapper[4799]: I0121 18:56:11.235733 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j92cx" event={"ID":"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8","Type":"ContainerDied","Data":"40d64d2161c17bf7a33b70cb00827680c3714d04c045991dfb65932e8d929b17"} Jan 21 18:56:12 crc kubenswrapper[4799]: I0121 18:56:12.247331 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j92cx" event={"ID":"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8","Type":"ContainerStarted","Data":"12b6e088f3e6dba9eade1f44737d3dd0b64bdb17fd21b30f99f432bd69acd2bd"} Jan 21 18:56:12 crc kubenswrapper[4799]: I0121 18:56:12.273083 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-j92cx" podStartSLOduration=2.832355345 podStartE2EDuration="5.273058959s" podCreationTimestamp="2026-01-21 18:56:07 +0000 UTC" firstStartedPulling="2026-01-21 18:56:09.225368384 +0000 UTC m=+4995.851658417" lastFinishedPulling="2026-01-21 18:56:11.666071998 +0000 UTC m=+4998.292362031" observedRunningTime="2026-01-21 18:56:12.266078051 +0000 UTC m=+4998.892368074" watchObservedRunningTime="2026-01-21 18:56:12.273058959 +0000 UTC m=+4998.899348982" Jan 21 18:56:17 crc kubenswrapper[4799]: I0121 18:56:17.876320 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:17 crc kubenswrapper[4799]: I0121 18:56:17.876826 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:17 crc kubenswrapper[4799]: I0121 18:56:17.945196 4799 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:18 crc kubenswrapper[4799]: I0121 18:56:18.373058 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:18 crc kubenswrapper[4799]: I0121 18:56:18.431516 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j92cx"] Jan 21 18:56:20 crc kubenswrapper[4799]: I0121 18:56:20.326950 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-j92cx" podUID="53e819ed-cc0e-4d4c-826f-949d5a7cd1e8" containerName="registry-server" containerID="cri-o://12b6e088f3e6dba9eade1f44737d3dd0b64bdb17fd21b30f99f432bd69acd2bd" gracePeriod=2 Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.306354 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.340773 4799 generic.go:334] "Generic (PLEG): container finished" podID="53e819ed-cc0e-4d4c-826f-949d5a7cd1e8" containerID="12b6e088f3e6dba9eade1f44737d3dd0b64bdb17fd21b30f99f432bd69acd2bd" exitCode=0 Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.340817 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j92cx" event={"ID":"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8","Type":"ContainerDied","Data":"12b6e088f3e6dba9eade1f44737d3dd0b64bdb17fd21b30f99f432bd69acd2bd"} Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.340851 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j92cx" event={"ID":"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8","Type":"ContainerDied","Data":"3366fe3589e2593005252e813553123f3154aafbb4a7e5b119caf787e66fffe0"} Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.340880 4799 scope.go:117] "RemoveContainer" containerID="12b6e088f3e6dba9eade1f44737d3dd0b64bdb17fd21b30f99f432bd69acd2bd" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.341046 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-j92cx" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.383868 4799 scope.go:117] "RemoveContainer" containerID="40d64d2161c17bf7a33b70cb00827680c3714d04c045991dfb65932e8d929b17" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.407311 4799 scope.go:117] "RemoveContainer" containerID="1b13248da0a68821130a11484f605a4e04fd19e9a95ef44708564c5792f89033" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.420910 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-catalog-content\") pod \"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8\" (UID: \"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8\") " Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.421005 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-utilities\") pod \"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8\" (UID: \"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8\") " Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.421134 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mhwr\" (UniqueName: \"kubernetes.io/projected/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-kube-api-access-7mhwr\") pod \"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8\" (UID: \"53e819ed-cc0e-4d4c-826f-949d5a7cd1e8\") " Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.422795 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-utilities" (OuterVolumeSpecName: "utilities") pod "53e819ed-cc0e-4d4c-826f-949d5a7cd1e8" (UID: "53e819ed-cc0e-4d4c-826f-949d5a7cd1e8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.426881 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-kube-api-access-7mhwr" (OuterVolumeSpecName: "kube-api-access-7mhwr") pod "53e819ed-cc0e-4d4c-826f-949d5a7cd1e8" (UID: "53e819ed-cc0e-4d4c-826f-949d5a7cd1e8"). InnerVolumeSpecName "kube-api-access-7mhwr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.469071 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "53e819ed-cc0e-4d4c-826f-949d5a7cd1e8" (UID: "53e819ed-cc0e-4d4c-826f-949d5a7cd1e8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.497741 4799 scope.go:117] "RemoveContainer" containerID="12b6e088f3e6dba9eade1f44737d3dd0b64bdb17fd21b30f99f432bd69acd2bd" Jan 21 18:56:21 crc kubenswrapper[4799]: E0121 18:56:21.498259 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12b6e088f3e6dba9eade1f44737d3dd0b64bdb17fd21b30f99f432bd69acd2bd\": container with ID starting with 12b6e088f3e6dba9eade1f44737d3dd0b64bdb17fd21b30f99f432bd69acd2bd not found: ID does not exist" containerID="12b6e088f3e6dba9eade1f44737d3dd0b64bdb17fd21b30f99f432bd69acd2bd" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.498297 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12b6e088f3e6dba9eade1f44737d3dd0b64bdb17fd21b30f99f432bd69acd2bd"} err="failed to get container status \"12b6e088f3e6dba9eade1f44737d3dd0b64bdb17fd21b30f99f432bd69acd2bd\": rpc error: code = NotFound desc = could not find container \"12b6e088f3e6dba9eade1f44737d3dd0b64bdb17fd21b30f99f432bd69acd2bd\": container with ID starting with 12b6e088f3e6dba9eade1f44737d3dd0b64bdb17fd21b30f99f432bd69acd2bd not found: ID does not exist" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.498328 4799 scope.go:117] "RemoveContainer" containerID="40d64d2161c17bf7a33b70cb00827680c3714d04c045991dfb65932e8d929b17" Jan 21 18:56:21 crc kubenswrapper[4799]: E0121 18:56:21.498812 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40d64d2161c17bf7a33b70cb00827680c3714d04c045991dfb65932e8d929b17\": container with ID starting with 40d64d2161c17bf7a33b70cb00827680c3714d04c045991dfb65932e8d929b17 not found: ID does not exist" containerID="40d64d2161c17bf7a33b70cb00827680c3714d04c045991dfb65932e8d929b17" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.498865 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40d64d2161c17bf7a33b70cb00827680c3714d04c045991dfb65932e8d929b17"} err="failed to get container status \"40d64d2161c17bf7a33b70cb00827680c3714d04c045991dfb65932e8d929b17\": rpc error: code = NotFound desc = could not find container \"40d64d2161c17bf7a33b70cb00827680c3714d04c045991dfb65932e8d929b17\": container with ID starting with 40d64d2161c17bf7a33b70cb00827680c3714d04c045991dfb65932e8d929b17 not found: ID does not exist" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.498907 4799 scope.go:117] "RemoveContainer" containerID="1b13248da0a68821130a11484f605a4e04fd19e9a95ef44708564c5792f89033" Jan 21 18:56:21 crc kubenswrapper[4799]: E0121 18:56:21.499386 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b13248da0a68821130a11484f605a4e04fd19e9a95ef44708564c5792f89033\": container with ID starting with 1b13248da0a68821130a11484f605a4e04fd19e9a95ef44708564c5792f89033 not found: ID does not exist" containerID="1b13248da0a68821130a11484f605a4e04fd19e9a95ef44708564c5792f89033" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.499419 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b13248da0a68821130a11484f605a4e04fd19e9a95ef44708564c5792f89033"} err="failed to get container status \"1b13248da0a68821130a11484f605a4e04fd19e9a95ef44708564c5792f89033\": rpc error: code = NotFound desc = could not 
find container \"1b13248da0a68821130a11484f605a4e04fd19e9a95ef44708564c5792f89033\": container with ID starting with 1b13248da0a68821130a11484f605a4e04fd19e9a95ef44708564c5792f89033 not found: ID does not exist" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.524947 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.524985 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.524998 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mhwr\" (UniqueName: \"kubernetes.io/projected/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8-kube-api-access-7mhwr\") on node \"crc\" DevicePath \"\"" Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.685596 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j92cx"] Jan 21 18:56:21 crc kubenswrapper[4799]: I0121 18:56:21.694227 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-j92cx"] Jan 21 18:56:22 crc kubenswrapper[4799]: I0121 18:56:22.223538 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53e819ed-cc0e-4d4c-826f-949d5a7cd1e8" path="/var/lib/kubelet/pods/53e819ed-cc0e-4d4c-826f-949d5a7cd1e8/volumes" Jan 21 18:56:55 crc kubenswrapper[4799]: I0121 18:56:55.971086 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:56:55 crc kubenswrapper[4799]: I0121 18:56:55.971689 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:57:25 crc kubenswrapper[4799]: I0121 18:57:25.970263 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:57:25 crc kubenswrapper[4799]: I0121 18:57:25.970933 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:57:32 crc kubenswrapper[4799]: I0121 18:57:32.218278 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wxx8v"] Jan 21 18:57:32 crc kubenswrapper[4799]: E0121 18:57:32.219507 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53e819ed-cc0e-4d4c-826f-949d5a7cd1e8" containerName="registry-server" Jan 21 18:57:32 crc kubenswrapper[4799]: I0121 18:57:32.219564 4799 
state_mem.go:107] "Deleted CPUSet assignment" podUID="53e819ed-cc0e-4d4c-826f-949d5a7cd1e8" containerName="registry-server" Jan 21 18:57:32 crc kubenswrapper[4799]: E0121 18:57:32.219603 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53e819ed-cc0e-4d4c-826f-949d5a7cd1e8" containerName="extract-utilities" Jan 21 18:57:32 crc kubenswrapper[4799]: I0121 18:57:32.219617 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="53e819ed-cc0e-4d4c-826f-949d5a7cd1e8" containerName="extract-utilities" Jan 21 18:57:32 crc kubenswrapper[4799]: E0121 18:57:32.219685 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53e819ed-cc0e-4d4c-826f-949d5a7cd1e8" containerName="extract-content" Jan 21 18:57:32 crc kubenswrapper[4799]: I0121 18:57:32.219697 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="53e819ed-cc0e-4d4c-826f-949d5a7cd1e8" containerName="extract-content" Jan 21 18:57:32 crc kubenswrapper[4799]: I0121 18:57:32.220038 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="53e819ed-cc0e-4d4c-826f-949d5a7cd1e8" containerName="registry-server" Jan 21 18:57:32 crc kubenswrapper[4799]: I0121 18:57:32.222760 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:32 crc kubenswrapper[4799]: I0121 18:57:32.230832 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wxx8v"] Jan 21 18:57:32 crc kubenswrapper[4799]: I0121 18:57:32.325676 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8d966e7-465a-468b-a097-44f03b9c0bea-utilities\") pod \"certified-operators-wxx8v\" (UID: \"a8d966e7-465a-468b-a097-44f03b9c0bea\") " pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:32 crc kubenswrapper[4799]: I0121 18:57:32.325877 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8d966e7-465a-468b-a097-44f03b9c0bea-catalog-content\") pod \"certified-operators-wxx8v\" (UID: \"a8d966e7-465a-468b-a097-44f03b9c0bea\") " pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:32 crc kubenswrapper[4799]: I0121 18:57:32.326238 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8pbw\" (UniqueName: \"kubernetes.io/projected/a8d966e7-465a-468b-a097-44f03b9c0bea-kube-api-access-m8pbw\") pod \"certified-operators-wxx8v\" (UID: \"a8d966e7-465a-468b-a097-44f03b9c0bea\") " pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:32 crc kubenswrapper[4799]: I0121 18:57:32.427944 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8pbw\" (UniqueName: \"kubernetes.io/projected/a8d966e7-465a-468b-a097-44f03b9c0bea-kube-api-access-m8pbw\") pod \"certified-operators-wxx8v\" (UID: \"a8d966e7-465a-468b-a097-44f03b9c0bea\") " pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:32 crc kubenswrapper[4799]: I0121 18:57:32.428093 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8d966e7-465a-468b-a097-44f03b9c0bea-utilities\") pod \"certified-operators-wxx8v\" (UID: \"a8d966e7-465a-468b-a097-44f03b9c0bea\") " pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:32 crc 
kubenswrapper[4799]: I0121 18:57:32.428170 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8d966e7-465a-468b-a097-44f03b9c0bea-catalog-content\") pod \"certified-operators-wxx8v\" (UID: \"a8d966e7-465a-468b-a097-44f03b9c0bea\") " pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:32 crc kubenswrapper[4799]: I0121 18:57:32.428594 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8d966e7-465a-468b-a097-44f03b9c0bea-catalog-content\") pod \"certified-operators-wxx8v\" (UID: \"a8d966e7-465a-468b-a097-44f03b9c0bea\") " pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:32 crc kubenswrapper[4799]: I0121 18:57:32.428733 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8d966e7-465a-468b-a097-44f03b9c0bea-utilities\") pod \"certified-operators-wxx8v\" (UID: \"a8d966e7-465a-468b-a097-44f03b9c0bea\") " pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:32 crc kubenswrapper[4799]: I0121 18:57:32.450793 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8pbw\" (UniqueName: \"kubernetes.io/projected/a8d966e7-465a-468b-a097-44f03b9c0bea-kube-api-access-m8pbw\") pod \"certified-operators-wxx8v\" (UID: \"a8d966e7-465a-468b-a097-44f03b9c0bea\") " pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:32 crc kubenswrapper[4799]: I0121 18:57:32.546317 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:33 crc kubenswrapper[4799]: I0121 18:57:33.075771 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wxx8v"] Jan 21 18:57:33 crc kubenswrapper[4799]: I0121 18:57:33.118536 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wxx8v" event={"ID":"a8d966e7-465a-468b-a097-44f03b9c0bea","Type":"ContainerStarted","Data":"c30785998192c3c69f7f11f7421c6fcc8ac4a6f1f97798c659d931f2e79859da"} Jan 21 18:57:34 crc kubenswrapper[4799]: I0121 18:57:34.141647 4799 generic.go:334] "Generic (PLEG): container finished" podID="a8d966e7-465a-468b-a097-44f03b9c0bea" containerID="08e505f4e759f7e82081b35aa1b7c670c3399d077ed1a6c5cf4fd1a7b920ace0" exitCode=0 Jan 21 18:57:34 crc kubenswrapper[4799]: I0121 18:57:34.141866 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wxx8v" event={"ID":"a8d966e7-465a-468b-a097-44f03b9c0bea","Type":"ContainerDied","Data":"08e505f4e759f7e82081b35aa1b7c670c3399d077ed1a6c5cf4fd1a7b920ace0"} Jan 21 18:57:34 crc kubenswrapper[4799]: I0121 18:57:34.147617 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 18:57:36 crc kubenswrapper[4799]: I0121 18:57:36.162680 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wxx8v" event={"ID":"a8d966e7-465a-468b-a097-44f03b9c0bea","Type":"ContainerStarted","Data":"5eb7210ca842204c386c4cf047ab9c249c8dae7f08ee157a87c6e8f15eae23de"} Jan 21 18:57:37 crc kubenswrapper[4799]: I0121 18:57:37.174686 4799 generic.go:334] "Generic (PLEG): container finished" podID="a8d966e7-465a-468b-a097-44f03b9c0bea" containerID="5eb7210ca842204c386c4cf047ab9c249c8dae7f08ee157a87c6e8f15eae23de" exitCode=0 
Jan 21 18:57:37 crc kubenswrapper[4799]: I0121 18:57:37.174731 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wxx8v" event={"ID":"a8d966e7-465a-468b-a097-44f03b9c0bea","Type":"ContainerDied","Data":"5eb7210ca842204c386c4cf047ab9c249c8dae7f08ee157a87c6e8f15eae23de"} Jan 21 18:57:39 crc kubenswrapper[4799]: I0121 18:57:39.198174 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wxx8v" event={"ID":"a8d966e7-465a-468b-a097-44f03b9c0bea","Type":"ContainerStarted","Data":"6a503a4dbd214805f7c0fec2546e0893052dcba841118a5bcffbe583b7d0191a"} Jan 21 18:57:39 crc kubenswrapper[4799]: I0121 18:57:39.226086 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wxx8v" podStartSLOduration=3.622629993 podStartE2EDuration="7.226065437s" podCreationTimestamp="2026-01-21 18:57:32 +0000 UTC" firstStartedPulling="2026-01-21 18:57:34.147107189 +0000 UTC m=+5080.773397222" lastFinishedPulling="2026-01-21 18:57:37.750542643 +0000 UTC m=+5084.376832666" observedRunningTime="2026-01-21 18:57:39.220568431 +0000 UTC m=+5085.846858464" watchObservedRunningTime="2026-01-21 18:57:39.226065437 +0000 UTC m=+5085.852355470" Jan 21 18:57:42 crc kubenswrapper[4799]: I0121 18:57:42.547120 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:42 crc kubenswrapper[4799]: I0121 18:57:42.548427 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:42 crc kubenswrapper[4799]: I0121 18:57:42.609228 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:43 crc kubenswrapper[4799]: I0121 18:57:43.275987 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:43 crc kubenswrapper[4799]: I0121 18:57:43.326903 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wxx8v"] Jan 21 18:57:45 crc kubenswrapper[4799]: I0121 18:57:45.248327 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wxx8v" podUID="a8d966e7-465a-468b-a097-44f03b9c0bea" containerName="registry-server" containerID="cri-o://6a503a4dbd214805f7c0fec2546e0893052dcba841118a5bcffbe583b7d0191a" gracePeriod=2 Jan 21 18:57:45 crc kubenswrapper[4799]: I0121 18:57:45.782074 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:45 crc kubenswrapper[4799]: I0121 18:57:45.866934 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8d966e7-465a-468b-a097-44f03b9c0bea-utilities\") pod \"a8d966e7-465a-468b-a097-44f03b9c0bea\" (UID: \"a8d966e7-465a-468b-a097-44f03b9c0bea\") " Jan 21 18:57:45 crc kubenswrapper[4799]: I0121 18:57:45.867083 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8d966e7-465a-468b-a097-44f03b9c0bea-catalog-content\") pod \"a8d966e7-465a-468b-a097-44f03b9c0bea\" (UID: \"a8d966e7-465a-468b-a097-44f03b9c0bea\") " Jan 21 18:57:45 crc kubenswrapper[4799]: I0121 18:57:45.867233 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8pbw\" (UniqueName: \"kubernetes.io/projected/a8d966e7-465a-468b-a097-44f03b9c0bea-kube-api-access-m8pbw\") pod \"a8d966e7-465a-468b-a097-44f03b9c0bea\" (UID: \"a8d966e7-465a-468b-a097-44f03b9c0bea\") " Jan 21 18:57:45 crc kubenswrapper[4799]: I0121 18:57:45.868302 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8d966e7-465a-468b-a097-44f03b9c0bea-utilities" (OuterVolumeSpecName: "utilities") pod "a8d966e7-465a-468b-a097-44f03b9c0bea" (UID: "a8d966e7-465a-468b-a097-44f03b9c0bea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:57:45 crc kubenswrapper[4799]: I0121 18:57:45.876530 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8d966e7-465a-468b-a097-44f03b9c0bea-kube-api-access-m8pbw" (OuterVolumeSpecName: "kube-api-access-m8pbw") pod "a8d966e7-465a-468b-a097-44f03b9c0bea" (UID: "a8d966e7-465a-468b-a097-44f03b9c0bea"). InnerVolumeSpecName "kube-api-access-m8pbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 18:57:45 crc kubenswrapper[4799]: I0121 18:57:45.969762 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8d966e7-465a-468b-a097-44f03b9c0bea-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 18:57:45 crc kubenswrapper[4799]: I0121 18:57:45.969811 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8pbw\" (UniqueName: \"kubernetes.io/projected/a8d966e7-465a-468b-a097-44f03b9c0bea-kube-api-access-m8pbw\") on node \"crc\" DevicePath \"\"" Jan 21 18:57:45 crc kubenswrapper[4799]: I0121 18:57:45.996527 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8d966e7-465a-468b-a097-44f03b9c0bea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a8d966e7-465a-468b-a097-44f03b9c0bea" (UID: "a8d966e7-465a-468b-a097-44f03b9c0bea"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 18:57:46 crc kubenswrapper[4799]: I0121 18:57:46.072188 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8d966e7-465a-468b-a097-44f03b9c0bea-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 18:57:46 crc kubenswrapper[4799]: I0121 18:57:46.261704 4799 generic.go:334] "Generic (PLEG): container finished" podID="a8d966e7-465a-468b-a097-44f03b9c0bea" containerID="6a503a4dbd214805f7c0fec2546e0893052dcba841118a5bcffbe583b7d0191a" exitCode=0 Jan 21 18:57:46 crc kubenswrapper[4799]: I0121 18:57:46.261756 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wxx8v" event={"ID":"a8d966e7-465a-468b-a097-44f03b9c0bea","Type":"ContainerDied","Data":"6a503a4dbd214805f7c0fec2546e0893052dcba841118a5bcffbe583b7d0191a"} Jan 21 18:57:46 crc kubenswrapper[4799]: I0121 18:57:46.261788 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wxx8v" event={"ID":"a8d966e7-465a-468b-a097-44f03b9c0bea","Type":"ContainerDied","Data":"c30785998192c3c69f7f11f7421c6fcc8ac4a6f1f97798c659d931f2e79859da"} Jan 21 18:57:46 crc kubenswrapper[4799]: I0121 18:57:46.261774 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wxx8v" Jan 21 18:57:46 crc kubenswrapper[4799]: I0121 18:57:46.261812 4799 scope.go:117] "RemoveContainer" containerID="6a503a4dbd214805f7c0fec2546e0893052dcba841118a5bcffbe583b7d0191a" Jan 21 18:57:46 crc kubenswrapper[4799]: I0121 18:57:46.289121 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wxx8v"] Jan 21 18:57:46 crc kubenswrapper[4799]: I0121 18:57:46.298987 4799 scope.go:117] "RemoveContainer" containerID="5eb7210ca842204c386c4cf047ab9c249c8dae7f08ee157a87c6e8f15eae23de" Jan 21 18:57:46 crc kubenswrapper[4799]: I0121 18:57:46.300473 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wxx8v"] Jan 21 18:57:46 crc kubenswrapper[4799]: I0121 18:57:46.343895 4799 scope.go:117] "RemoveContainer" containerID="08e505f4e759f7e82081b35aa1b7c670c3399d077ed1a6c5cf4fd1a7b920ace0" Jan 21 18:57:46 crc kubenswrapper[4799]: I0121 18:57:46.389433 4799 scope.go:117] "RemoveContainer" containerID="6a503a4dbd214805f7c0fec2546e0893052dcba841118a5bcffbe583b7d0191a" Jan 21 18:57:46 crc kubenswrapper[4799]: E0121 18:57:46.390256 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a503a4dbd214805f7c0fec2546e0893052dcba841118a5bcffbe583b7d0191a\": container with ID starting with 6a503a4dbd214805f7c0fec2546e0893052dcba841118a5bcffbe583b7d0191a not found: ID does not exist" containerID="6a503a4dbd214805f7c0fec2546e0893052dcba841118a5bcffbe583b7d0191a" Jan 21 18:57:46 crc kubenswrapper[4799]: I0121 18:57:46.390291 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a503a4dbd214805f7c0fec2546e0893052dcba841118a5bcffbe583b7d0191a"} err="failed to get container status \"6a503a4dbd214805f7c0fec2546e0893052dcba841118a5bcffbe583b7d0191a\": rpc error: code = NotFound desc = could not find container \"6a503a4dbd214805f7c0fec2546e0893052dcba841118a5bcffbe583b7d0191a\": container with ID starting with 6a503a4dbd214805f7c0fec2546e0893052dcba841118a5bcffbe583b7d0191a not found: ID does not exist" Jan 21 
18:57:46 crc kubenswrapper[4799]: I0121 18:57:46.390317 4799 scope.go:117] "RemoveContainer" containerID="5eb7210ca842204c386c4cf047ab9c249c8dae7f08ee157a87c6e8f15eae23de" Jan 21 18:57:46 crc kubenswrapper[4799]: E0121 18:57:46.390799 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5eb7210ca842204c386c4cf047ab9c249c8dae7f08ee157a87c6e8f15eae23de\": container with ID starting with 5eb7210ca842204c386c4cf047ab9c249c8dae7f08ee157a87c6e8f15eae23de not found: ID does not exist" containerID="5eb7210ca842204c386c4cf047ab9c249c8dae7f08ee157a87c6e8f15eae23de" Jan 21 18:57:46 crc kubenswrapper[4799]: I0121 18:57:46.390823 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5eb7210ca842204c386c4cf047ab9c249c8dae7f08ee157a87c6e8f15eae23de"} err="failed to get container status \"5eb7210ca842204c386c4cf047ab9c249c8dae7f08ee157a87c6e8f15eae23de\": rpc error: code = NotFound desc = could not find container \"5eb7210ca842204c386c4cf047ab9c249c8dae7f08ee157a87c6e8f15eae23de\": container with ID starting with 5eb7210ca842204c386c4cf047ab9c249c8dae7f08ee157a87c6e8f15eae23de not found: ID does not exist" Jan 21 18:57:46 crc kubenswrapper[4799]: I0121 18:57:46.390843 4799 scope.go:117] "RemoveContainer" containerID="08e505f4e759f7e82081b35aa1b7c670c3399d077ed1a6c5cf4fd1a7b920ace0" Jan 21 18:57:46 crc kubenswrapper[4799]: E0121 18:57:46.391338 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08e505f4e759f7e82081b35aa1b7c670c3399d077ed1a6c5cf4fd1a7b920ace0\": container with ID starting with 08e505f4e759f7e82081b35aa1b7c670c3399d077ed1a6c5cf4fd1a7b920ace0 not found: ID does not exist" containerID="08e505f4e759f7e82081b35aa1b7c670c3399d077ed1a6c5cf4fd1a7b920ace0" Jan 21 18:57:46 crc kubenswrapper[4799]: I0121 18:57:46.391361 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08e505f4e759f7e82081b35aa1b7c670c3399d077ed1a6c5cf4fd1a7b920ace0"} err="failed to get container status \"08e505f4e759f7e82081b35aa1b7c670c3399d077ed1a6c5cf4fd1a7b920ace0\": rpc error: code = NotFound desc = could not find container \"08e505f4e759f7e82081b35aa1b7c670c3399d077ed1a6c5cf4fd1a7b920ace0\": container with ID starting with 08e505f4e759f7e82081b35aa1b7c670c3399d077ed1a6c5cf4fd1a7b920ace0 not found: ID does not exist" Jan 21 18:57:48 crc kubenswrapper[4799]: I0121 18:57:48.216362 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8d966e7-465a-468b-a097-44f03b9c0bea" path="/var/lib/kubelet/pods/a8d966e7-465a-468b-a097-44f03b9c0bea/volumes" Jan 21 18:57:55 crc kubenswrapper[4799]: I0121 18:57:55.971108 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 18:57:55 crc kubenswrapper[4799]: I0121 18:57:55.971842 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 18:57:55 crc kubenswrapper[4799]: I0121 18:57:55.971922 4799 kubelet.go:2542] 
"SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 18:57:55 crc kubenswrapper[4799]: I0121 18:57:55.973047 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ea14aa4ba04a0d2455a9611b11a13b3eb56975233f138e469cb13f8c18deacd0"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 18:57:55 crc kubenswrapper[4799]: I0121 18:57:55.973167 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://ea14aa4ba04a0d2455a9611b11a13b3eb56975233f138e469cb13f8c18deacd0" gracePeriod=600 Jan 21 18:57:56 crc kubenswrapper[4799]: I0121 18:57:56.366733 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="ea14aa4ba04a0d2455a9611b11a13b3eb56975233f138e469cb13f8c18deacd0" exitCode=0 Jan 21 18:57:56 crc kubenswrapper[4799]: I0121 18:57:56.366791 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"ea14aa4ba04a0d2455a9611b11a13b3eb56975233f138e469cb13f8c18deacd0"} Jan 21 18:57:56 crc kubenswrapper[4799]: I0121 18:57:56.366836 4799 scope.go:117] "RemoveContainer" containerID="95d7582994993507b1c273e09533a0d8f5bff269b4571832a50262a00ab0900c" Jan 21 18:57:57 crc kubenswrapper[4799]: I0121 18:57:57.378857 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794"} Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.160866 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw"] Jan 21 19:00:00 crc kubenswrapper[4799]: E0121 19:00:00.161800 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8d966e7-465a-468b-a097-44f03b9c0bea" containerName="extract-content" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.161814 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8d966e7-465a-468b-a097-44f03b9c0bea" containerName="extract-content" Jan 21 19:00:00 crc kubenswrapper[4799]: E0121 19:00:00.161893 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8d966e7-465a-468b-a097-44f03b9c0bea" containerName="registry-server" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.161903 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8d966e7-465a-468b-a097-44f03b9c0bea" containerName="registry-server" Jan 21 19:00:00 crc kubenswrapper[4799]: E0121 19:00:00.161936 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8d966e7-465a-468b-a097-44f03b9c0bea" containerName="extract-utilities" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.161942 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8d966e7-465a-468b-a097-44f03b9c0bea" containerName="extract-utilities" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.162167 4799 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="a8d966e7-465a-468b-a097-44f03b9c0bea" containerName="registry-server" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.163041 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.166919 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.167366 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.180581 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55e0a9f8-9a15-421c-9cc8-736d51c3b075-secret-volume\") pod \"collect-profiles-29483700-srmsw\" (UID: \"55e0a9f8-9a15-421c-9cc8-736d51c3b075\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.180660 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55e0a9f8-9a15-421c-9cc8-736d51c3b075-config-volume\") pod \"collect-profiles-29483700-srmsw\" (UID: \"55e0a9f8-9a15-421c-9cc8-736d51c3b075\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.180843 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9s8b\" (UniqueName: \"kubernetes.io/projected/55e0a9f8-9a15-421c-9cc8-736d51c3b075-kube-api-access-h9s8b\") pod \"collect-profiles-29483700-srmsw\" (UID: \"55e0a9f8-9a15-421c-9cc8-736d51c3b075\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.188101 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw"] Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.282329 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55e0a9f8-9a15-421c-9cc8-736d51c3b075-secret-volume\") pod \"collect-profiles-29483700-srmsw\" (UID: \"55e0a9f8-9a15-421c-9cc8-736d51c3b075\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.282390 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55e0a9f8-9a15-421c-9cc8-736d51c3b075-config-volume\") pod \"collect-profiles-29483700-srmsw\" (UID: \"55e0a9f8-9a15-421c-9cc8-736d51c3b075\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.282563 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9s8b\" (UniqueName: \"kubernetes.io/projected/55e0a9f8-9a15-421c-9cc8-736d51c3b075-kube-api-access-h9s8b\") pod \"collect-profiles-29483700-srmsw\" (UID: \"55e0a9f8-9a15-421c-9cc8-736d51c3b075\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 
19:00:00.285245 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55e0a9f8-9a15-421c-9cc8-736d51c3b075-config-volume\") pod \"collect-profiles-29483700-srmsw\" (UID: \"55e0a9f8-9a15-421c-9cc8-736d51c3b075\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.290237 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55e0a9f8-9a15-421c-9cc8-736d51c3b075-secret-volume\") pod \"collect-profiles-29483700-srmsw\" (UID: \"55e0a9f8-9a15-421c-9cc8-736d51c3b075\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.309118 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9s8b\" (UniqueName: \"kubernetes.io/projected/55e0a9f8-9a15-421c-9cc8-736d51c3b075-kube-api-access-h9s8b\") pod \"collect-profiles-29483700-srmsw\" (UID: \"55e0a9f8-9a15-421c-9cc8-736d51c3b075\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw" Jan 21 19:00:00 crc kubenswrapper[4799]: I0121 19:00:00.498896 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw" Jan 21 19:00:01 crc kubenswrapper[4799]: I0121 19:00:01.021685 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw"] Jan 21 19:00:01 crc kubenswrapper[4799]: I0121 19:00:01.637008 4799 generic.go:334] "Generic (PLEG): container finished" podID="55e0a9f8-9a15-421c-9cc8-736d51c3b075" containerID="083fe4d68b470139eb69d559ac34e85d34fd2fa4eeab5650532b25bd3d07d30d" exitCode=0 Jan 21 19:00:01 crc kubenswrapper[4799]: I0121 19:00:01.637066 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw" event={"ID":"55e0a9f8-9a15-421c-9cc8-736d51c3b075","Type":"ContainerDied","Data":"083fe4d68b470139eb69d559ac34e85d34fd2fa4eeab5650532b25bd3d07d30d"} Jan 21 19:00:01 crc kubenswrapper[4799]: I0121 19:00:01.637336 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw" event={"ID":"55e0a9f8-9a15-421c-9cc8-736d51c3b075","Type":"ContainerStarted","Data":"e66cb4a856cace0f05c9924c12993c88b8d444c72796d9bf7ba66d5504ac4552"} Jan 21 19:00:03 crc kubenswrapper[4799]: I0121 19:00:03.036679 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw" Jan 21 19:00:03 crc kubenswrapper[4799]: I0121 19:00:03.043383 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9s8b\" (UniqueName: \"kubernetes.io/projected/55e0a9f8-9a15-421c-9cc8-736d51c3b075-kube-api-access-h9s8b\") pod \"55e0a9f8-9a15-421c-9cc8-736d51c3b075\" (UID: \"55e0a9f8-9a15-421c-9cc8-736d51c3b075\") " Jan 21 19:00:03 crc kubenswrapper[4799]: I0121 19:00:03.043447 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55e0a9f8-9a15-421c-9cc8-736d51c3b075-config-volume\") pod \"55e0a9f8-9a15-421c-9cc8-736d51c3b075\" (UID: \"55e0a9f8-9a15-421c-9cc8-736d51c3b075\") " Jan 21 19:00:03 crc kubenswrapper[4799]: I0121 19:00:03.043664 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55e0a9f8-9a15-421c-9cc8-736d51c3b075-secret-volume\") pod \"55e0a9f8-9a15-421c-9cc8-736d51c3b075\" (UID: \"55e0a9f8-9a15-421c-9cc8-736d51c3b075\") " Jan 21 19:00:03 crc kubenswrapper[4799]: I0121 19:00:03.045842 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55e0a9f8-9a15-421c-9cc8-736d51c3b075-config-volume" (OuterVolumeSpecName: "config-volume") pod "55e0a9f8-9a15-421c-9cc8-736d51c3b075" (UID: "55e0a9f8-9a15-421c-9cc8-736d51c3b075"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 19:00:03 crc kubenswrapper[4799]: I0121 19:00:03.087219 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55e0a9f8-9a15-421c-9cc8-736d51c3b075-kube-api-access-h9s8b" (OuterVolumeSpecName: "kube-api-access-h9s8b") pod "55e0a9f8-9a15-421c-9cc8-736d51c3b075" (UID: "55e0a9f8-9a15-421c-9cc8-736d51c3b075"). InnerVolumeSpecName "kube-api-access-h9s8b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:00:03 crc kubenswrapper[4799]: I0121 19:00:03.087844 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55e0a9f8-9a15-421c-9cc8-736d51c3b075-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "55e0a9f8-9a15-421c-9cc8-736d51c3b075" (UID: "55e0a9f8-9a15-421c-9cc8-736d51c3b075"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 19:00:03 crc kubenswrapper[4799]: I0121 19:00:03.146173 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55e0a9f8-9a15-421c-9cc8-736d51c3b075-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 21 19:00:03 crc kubenswrapper[4799]: I0121 19:00:03.146214 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9s8b\" (UniqueName: \"kubernetes.io/projected/55e0a9f8-9a15-421c-9cc8-736d51c3b075-kube-api-access-h9s8b\") on node \"crc\" DevicePath \"\"" Jan 21 19:00:03 crc kubenswrapper[4799]: I0121 19:00:03.146229 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55e0a9f8-9a15-421c-9cc8-736d51c3b075-config-volume\") on node \"crc\" DevicePath \"\"" Jan 21 19:00:03 crc kubenswrapper[4799]: I0121 19:00:03.658166 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw" Jan 21 19:00:03 crc kubenswrapper[4799]: I0121 19:00:03.658096 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483700-srmsw" event={"ID":"55e0a9f8-9a15-421c-9cc8-736d51c3b075","Type":"ContainerDied","Data":"e66cb4a856cace0f05c9924c12993c88b8d444c72796d9bf7ba66d5504ac4552"} Jan 21 19:00:03 crc kubenswrapper[4799]: I0121 19:00:03.658638 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e66cb4a856cace0f05c9924c12993c88b8d444c72796d9bf7ba66d5504ac4552" Jan 21 19:00:04 crc kubenswrapper[4799]: I0121 19:00:04.143247 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf"] Jan 21 19:00:04 crc kubenswrapper[4799]: I0121 19:00:04.153512 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483655-5h8wf"] Jan 21 19:00:04 crc kubenswrapper[4799]: I0121 19:00:04.218997 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c23b2345-2c6a-44dc-9cb6-a9a53981011c" path="/var/lib/kubelet/pods/c23b2345-2c6a-44dc-9cb6-a9a53981011c/volumes" Jan 21 19:00:25 crc kubenswrapper[4799]: I0121 19:00:25.971485 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 19:00:25 crc kubenswrapper[4799]: I0121 19:00:25.972099 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 19:00:41 crc kubenswrapper[4799]: I0121 19:00:41.549825 4799 scope.go:117] "RemoveContainer" containerID="7490a6b21ca06b5b208fdf5be82ebdd0189dd80ac532993b7537d8b4f958dda2" Jan 21 19:00:51 crc kubenswrapper[4799]: I0121 19:00:51.090074 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bqnw5"] Jan 21 19:00:51 crc kubenswrapper[4799]: E0121 19:00:51.092981 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55e0a9f8-9a15-421c-9cc8-736d51c3b075" containerName="collect-profiles" Jan 21 19:00:51 crc kubenswrapper[4799]: I0121 19:00:51.093015 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="55e0a9f8-9a15-421c-9cc8-736d51c3b075" containerName="collect-profiles" Jan 21 19:00:51 crc kubenswrapper[4799]: I0121 19:00:51.093548 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="55e0a9f8-9a15-421c-9cc8-736d51c3b075" containerName="collect-profiles" Jan 21 19:00:51 crc kubenswrapper[4799]: I0121 19:00:51.095711 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:00:51 crc kubenswrapper[4799]: I0121 19:00:51.130395 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lw2z\" (UniqueName: \"kubernetes.io/projected/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-kube-api-access-4lw2z\") pod \"redhat-marketplace-bqnw5\" (UID: \"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf\") " pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:00:51 crc kubenswrapper[4799]: I0121 19:00:51.130729 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-catalog-content\") pod \"redhat-marketplace-bqnw5\" (UID: \"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf\") " pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:00:51 crc kubenswrapper[4799]: I0121 19:00:51.131201 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-utilities\") pod \"redhat-marketplace-bqnw5\" (UID: \"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf\") " pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:00:51 crc kubenswrapper[4799]: I0121 19:00:51.134861 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqnw5"] Jan 21 19:00:51 crc kubenswrapper[4799]: I0121 19:00:51.233415 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-utilities\") pod \"redhat-marketplace-bqnw5\" (UID: \"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf\") " pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:00:51 crc kubenswrapper[4799]: I0121 19:00:51.233863 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lw2z\" (UniqueName: \"kubernetes.io/projected/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-kube-api-access-4lw2z\") pod \"redhat-marketplace-bqnw5\" (UID: \"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf\") " pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:00:51 crc kubenswrapper[4799]: I0121 19:00:51.233971 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-catalog-content\") pod \"redhat-marketplace-bqnw5\" (UID: \"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf\") " pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:00:51 crc kubenswrapper[4799]: I0121 19:00:51.234053 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-utilities\") pod \"redhat-marketplace-bqnw5\" (UID: \"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf\") " pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:00:51 crc kubenswrapper[4799]: I0121 19:00:51.234541 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-catalog-content\") pod \"redhat-marketplace-bqnw5\" (UID: \"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf\") " pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:00:51 crc kubenswrapper[4799]: I0121 19:00:51.256934 4799 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-4lw2z\" (UniqueName: \"kubernetes.io/projected/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-kube-api-access-4lw2z\") pod \"redhat-marketplace-bqnw5\" (UID: \"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf\") " pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:00:51 crc kubenswrapper[4799]: I0121 19:00:51.424174 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:00:51 crc kubenswrapper[4799]: I0121 19:00:51.997497 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqnw5"] Jan 21 19:00:52 crc kubenswrapper[4799]: I0121 19:00:52.173053 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqnw5" event={"ID":"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf","Type":"ContainerStarted","Data":"3b6574eab180366bbcd3c3edb3561550c392f6045a91f9ec251e4f7af7905814"} Jan 21 19:00:53 crc kubenswrapper[4799]: I0121 19:00:53.183999 4799 generic.go:334] "Generic (PLEG): container finished" podID="6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf" containerID="09fb0ef0fedafbf5ec4f80d7de75079f0e43cf3818b38ca8e55b256e01ae175f" exitCode=0 Jan 21 19:00:53 crc kubenswrapper[4799]: I0121 19:00:53.184185 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqnw5" event={"ID":"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf","Type":"ContainerDied","Data":"09fb0ef0fedafbf5ec4f80d7de75079f0e43cf3818b38ca8e55b256e01ae175f"} Jan 21 19:00:55 crc kubenswrapper[4799]: I0121 19:00:55.210903 4799 generic.go:334] "Generic (PLEG): container finished" podID="6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf" containerID="0a075901f72f83d7dc6e3fa6747e8f02e6007f3e44677df85c873ff52f851977" exitCode=0 Jan 21 19:00:55 crc kubenswrapper[4799]: I0121 19:00:55.210983 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqnw5" event={"ID":"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf","Type":"ContainerDied","Data":"0a075901f72f83d7dc6e3fa6747e8f02e6007f3e44677df85c873ff52f851977"} Jan 21 19:00:55 crc kubenswrapper[4799]: I0121 19:00:55.971003 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 19:00:55 crc kubenswrapper[4799]: I0121 19:00:55.971417 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 19:00:56 crc kubenswrapper[4799]: I0121 19:00:56.224046 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqnw5" event={"ID":"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf","Type":"ContainerStarted","Data":"d40a67f36a940902aeca63db7980493ff6e917ad25c6b68e16ae11b932f7c56f"} Jan 21 19:00:56 crc kubenswrapper[4799]: I0121 19:00:56.254817 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bqnw5" podStartSLOduration=2.757600617 podStartE2EDuration="5.254784303s" podCreationTimestamp="2026-01-21 19:00:51 +0000 UTC" firstStartedPulling="2026-01-21 19:00:53.186870457 
Jan 21 19:01:00 crc kubenswrapper[4799]: I0121 19:01:00.175848 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29483701-6vvv2"]
Jan 21 19:01:00 crc kubenswrapper[4799]: I0121 19:01:00.178845 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29483701-6vvv2"
Jan 21 19:01:00 crc kubenswrapper[4799]: I0121 19:01:00.218916 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29483701-6vvv2"]
Jan 21 19:01:00 crc kubenswrapper[4799]: I0121 19:01:00.281608 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-combined-ca-bundle\") pod \"keystone-cron-29483701-6vvv2\" (UID: \"7295fe64-03cb-4766-934e-cbd3eace9c00\") " pod="openstack/keystone-cron-29483701-6vvv2"
Jan 21 19:01:00 crc kubenswrapper[4799]: I0121 19:01:00.281766 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-config-data\") pod \"keystone-cron-29483701-6vvv2\" (UID: \"7295fe64-03cb-4766-934e-cbd3eace9c00\") " pod="openstack/keystone-cron-29483701-6vvv2"
Jan 21 19:01:00 crc kubenswrapper[4799]: I0121 19:01:00.281798 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxqsh\" (UniqueName: \"kubernetes.io/projected/7295fe64-03cb-4766-934e-cbd3eace9c00-kube-api-access-pxqsh\") pod \"keystone-cron-29483701-6vvv2\" (UID: \"7295fe64-03cb-4766-934e-cbd3eace9c00\") " pod="openstack/keystone-cron-29483701-6vvv2"
Jan 21 19:01:00 crc kubenswrapper[4799]: I0121 19:01:00.281850 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-fernet-keys\") pod \"keystone-cron-29483701-6vvv2\" (UID: \"7295fe64-03cb-4766-934e-cbd3eace9c00\") " pod="openstack/keystone-cron-29483701-6vvv2"
Jan 21 19:01:00 crc kubenswrapper[4799]: I0121 19:01:00.383910 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-fernet-keys\") pod \"keystone-cron-29483701-6vvv2\" (UID: \"7295fe64-03cb-4766-934e-cbd3eace9c00\") " pod="openstack/keystone-cron-29483701-6vvv2"
Jan 21 19:01:00 crc kubenswrapper[4799]: I0121 19:01:00.384588 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-combined-ca-bundle\") pod \"keystone-cron-29483701-6vvv2\" (UID: \"7295fe64-03cb-4766-934e-cbd3eace9c00\") " pod="openstack/keystone-cron-29483701-6vvv2"
Jan 21 19:01:00 crc kubenswrapper[4799]: I0121 19:01:00.384677 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-config-data\") pod \"keystone-cron-29483701-6vvv2\" (UID: \"7295fe64-03cb-4766-934e-cbd3eace9c00\") " pod="openstack/keystone-cron-29483701-6vvv2"
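
Note: the suffix in "keystone-cron-29483701-6vvv2" is not random; the CronJob controller names each Job after its scheduled time in minutes since the Unix epoch. Decoding it: 29483701 * 60 = 1769022060, i.e. 2026-01-21 19:01:00 UTC, which matches the SyncLoop ADD timestamp above. A sketch of the decoding:

    package cron

    import "time"

    // scheduledTime decodes the minutes-since-epoch suffix the CronJob
    // controller appends to Job names. scheduledTime(29483701) returns
    // 2026-01-21 19:01:00 UTC, the minute this keystone-cron run was due.
    func scheduledTime(suffix int64) time.Time {
        return time.Unix(suffix*60, 0).UTC()
    }
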
pod="openstack/keystone-cron-29483701-6vvv2" Jan 21 19:01:00 crc kubenswrapper[4799]: I0121 19:01:00.384707 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxqsh\" (UniqueName: \"kubernetes.io/projected/7295fe64-03cb-4766-934e-cbd3eace9c00-kube-api-access-pxqsh\") pod \"keystone-cron-29483701-6vvv2\" (UID: \"7295fe64-03cb-4766-934e-cbd3eace9c00\") " pod="openstack/keystone-cron-29483701-6vvv2" Jan 21 19:01:00 crc kubenswrapper[4799]: I0121 19:01:00.390583 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-config-data\") pod \"keystone-cron-29483701-6vvv2\" (UID: \"7295fe64-03cb-4766-934e-cbd3eace9c00\") " pod="openstack/keystone-cron-29483701-6vvv2" Jan 21 19:01:00 crc kubenswrapper[4799]: I0121 19:01:00.391311 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-fernet-keys\") pod \"keystone-cron-29483701-6vvv2\" (UID: \"7295fe64-03cb-4766-934e-cbd3eace9c00\") " pod="openstack/keystone-cron-29483701-6vvv2" Jan 21 19:01:00 crc kubenswrapper[4799]: I0121 19:01:00.399803 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-combined-ca-bundle\") pod \"keystone-cron-29483701-6vvv2\" (UID: \"7295fe64-03cb-4766-934e-cbd3eace9c00\") " pod="openstack/keystone-cron-29483701-6vvv2" Jan 21 19:01:00 crc kubenswrapper[4799]: I0121 19:01:00.404506 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxqsh\" (UniqueName: \"kubernetes.io/projected/7295fe64-03cb-4766-934e-cbd3eace9c00-kube-api-access-pxqsh\") pod \"keystone-cron-29483701-6vvv2\" (UID: \"7295fe64-03cb-4766-934e-cbd3eace9c00\") " pod="openstack/keystone-cron-29483701-6vvv2" Jan 21 19:01:00 crc kubenswrapper[4799]: I0121 19:01:00.526498 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29483701-6vvv2" Jan 21 19:01:01 crc kubenswrapper[4799]: I0121 19:01:01.015223 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29483701-6vvv2"] Jan 21 19:01:01 crc kubenswrapper[4799]: W0121 19:01:01.040373 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7295fe64_03cb_4766_934e_cbd3eace9c00.slice/crio-d7ef414d3aa4eb7ea8b6eef042b014523b5782a98f64bcf6421f24b6e899d4f0 WatchSource:0}: Error finding container d7ef414d3aa4eb7ea8b6eef042b014523b5782a98f64bcf6421f24b6e899d4f0: Status 404 returned error can't find the container with id d7ef414d3aa4eb7ea8b6eef042b014523b5782a98f64bcf6421f24b6e899d4f0 Jan 21 19:01:01 crc kubenswrapper[4799]: I0121 19:01:01.278581 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29483701-6vvv2" event={"ID":"7295fe64-03cb-4766-934e-cbd3eace9c00","Type":"ContainerStarted","Data":"eabcf041b57000e7308fb4d6982bd0112b560db2a57ea5b9eda46e200ab53b39"} Jan 21 19:01:01 crc kubenswrapper[4799]: I0121 19:01:01.278886 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29483701-6vvv2" event={"ID":"7295fe64-03cb-4766-934e-cbd3eace9c00","Type":"ContainerStarted","Data":"d7ef414d3aa4eb7ea8b6eef042b014523b5782a98f64bcf6421f24b6e899d4f0"} Jan 21 19:01:01 crc kubenswrapper[4799]: I0121 19:01:01.304889 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29483701-6vvv2" podStartSLOduration=1.304864112 podStartE2EDuration="1.304864112s" podCreationTimestamp="2026-01-21 19:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 19:01:01.297974297 +0000 UTC m=+5287.924264360" watchObservedRunningTime="2026-01-21 19:01:01.304864112 +0000 UTC m=+5287.931154135" Jan 21 19:01:01 crc kubenswrapper[4799]: I0121 19:01:01.425877 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:01:01 crc kubenswrapper[4799]: I0121 19:01:01.425923 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:01:01 crc kubenswrapper[4799]: I0121 19:01:01.482219 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:01:02 crc kubenswrapper[4799]: I0121 19:01:02.380299 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:01:02 crc kubenswrapper[4799]: I0121 19:01:02.429871 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqnw5"] Jan 21 19:01:04 crc kubenswrapper[4799]: I0121 19:01:04.310856 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bqnw5" podUID="6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf" containerName="registry-server" containerID="cri-o://d40a67f36a940902aeca63db7980493ff6e917ad25c6b68e16ae11b932f7c56f" gracePeriod=2 Jan 21 19:01:04 crc kubenswrapper[4799]: I0121 19:01:04.876405 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:01:04 crc kubenswrapper[4799]: I0121 19:01:04.998394 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-utilities\") pod \"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf\" (UID: \"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf\") " Jan 21 19:01:04 crc kubenswrapper[4799]: I0121 19:01:04.998505 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4lw2z\" (UniqueName: \"kubernetes.io/projected/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-kube-api-access-4lw2z\") pod \"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf\" (UID: \"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf\") " Jan 21 19:01:04 crc kubenswrapper[4799]: I0121 19:01:04.998676 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-catalog-content\") pod \"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf\" (UID: \"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf\") " Jan 21 19:01:04 crc kubenswrapper[4799]: I0121 19:01:04.999701 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-utilities" (OuterVolumeSpecName: "utilities") pod "6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf" (UID: "6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.006721 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-kube-api-access-4lw2z" (OuterVolumeSpecName: "kube-api-access-4lw2z") pod "6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf" (UID: "6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf"). InnerVolumeSpecName "kube-api-access-4lw2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.025034 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf" (UID: "6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.101958 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.101988 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4lw2z\" (UniqueName: \"kubernetes.io/projected/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-kube-api-access-4lw2z\") on node \"crc\" DevicePath \"\"" Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.101999 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.325858 4799 generic.go:334] "Generic (PLEG): container finished" podID="7295fe64-03cb-4766-934e-cbd3eace9c00" containerID="eabcf041b57000e7308fb4d6982bd0112b560db2a57ea5b9eda46e200ab53b39" exitCode=0 Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.326019 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29483701-6vvv2" event={"ID":"7295fe64-03cb-4766-934e-cbd3eace9c00","Type":"ContainerDied","Data":"eabcf041b57000e7308fb4d6982bd0112b560db2a57ea5b9eda46e200ab53b39"} Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.331416 4799 generic.go:334] "Generic (PLEG): container finished" podID="6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf" containerID="d40a67f36a940902aeca63db7980493ff6e917ad25c6b68e16ae11b932f7c56f" exitCode=0 Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.331479 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqnw5" event={"ID":"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf","Type":"ContainerDied","Data":"d40a67f36a940902aeca63db7980493ff6e917ad25c6b68e16ae11b932f7c56f"} Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.331509 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bqnw5" Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.331536 4799 scope.go:117] "RemoveContainer" containerID="d40a67f36a940902aeca63db7980493ff6e917ad25c6b68e16ae11b932f7c56f" Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.331518 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqnw5" event={"ID":"6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf","Type":"ContainerDied","Data":"3b6574eab180366bbcd3c3edb3561550c392f6045a91f9ec251e4f7af7905814"} Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.386281 4799 scope.go:117] "RemoveContainer" containerID="0a075901f72f83d7dc6e3fa6747e8f02e6007f3e44677df85c873ff52f851977" Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.406889 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqnw5"] Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.419861 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqnw5"] Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.436422 4799 scope.go:117] "RemoveContainer" containerID="09fb0ef0fedafbf5ec4f80d7de75079f0e43cf3818b38ca8e55b256e01ae175f" Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.488345 4799 scope.go:117] "RemoveContainer" containerID="d40a67f36a940902aeca63db7980493ff6e917ad25c6b68e16ae11b932f7c56f" Jan 21 19:01:05 crc kubenswrapper[4799]: E0121 19:01:05.489059 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d40a67f36a940902aeca63db7980493ff6e917ad25c6b68e16ae11b932f7c56f\": container with ID starting with d40a67f36a940902aeca63db7980493ff6e917ad25c6b68e16ae11b932f7c56f not found: ID does not exist" containerID="d40a67f36a940902aeca63db7980493ff6e917ad25c6b68e16ae11b932f7c56f" Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.489276 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d40a67f36a940902aeca63db7980493ff6e917ad25c6b68e16ae11b932f7c56f"} err="failed to get container status \"d40a67f36a940902aeca63db7980493ff6e917ad25c6b68e16ae11b932f7c56f\": rpc error: code = NotFound desc = could not find container \"d40a67f36a940902aeca63db7980493ff6e917ad25c6b68e16ae11b932f7c56f\": container with ID starting with d40a67f36a940902aeca63db7980493ff6e917ad25c6b68e16ae11b932f7c56f not found: ID does not exist" Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.489419 4799 scope.go:117] "RemoveContainer" containerID="0a075901f72f83d7dc6e3fa6747e8f02e6007f3e44677df85c873ff52f851977" Jan 21 19:01:05 crc kubenswrapper[4799]: E0121 19:01:05.489884 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a075901f72f83d7dc6e3fa6747e8f02e6007f3e44677df85c873ff52f851977\": container with ID starting with 0a075901f72f83d7dc6e3fa6747e8f02e6007f3e44677df85c873ff52f851977 not found: ID does not exist" containerID="0a075901f72f83d7dc6e3fa6747e8f02e6007f3e44677df85c873ff52f851977" Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.490019 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a075901f72f83d7dc6e3fa6747e8f02e6007f3e44677df85c873ff52f851977"} err="failed to get container status \"0a075901f72f83d7dc6e3fa6747e8f02e6007f3e44677df85c873ff52f851977\": rpc error: code = NotFound desc = could not find 
container \"0a075901f72f83d7dc6e3fa6747e8f02e6007f3e44677df85c873ff52f851977\": container with ID starting with 0a075901f72f83d7dc6e3fa6747e8f02e6007f3e44677df85c873ff52f851977 not found: ID does not exist" Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.490117 4799 scope.go:117] "RemoveContainer" containerID="09fb0ef0fedafbf5ec4f80d7de75079f0e43cf3818b38ca8e55b256e01ae175f" Jan 21 19:01:05 crc kubenswrapper[4799]: E0121 19:01:05.490557 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09fb0ef0fedafbf5ec4f80d7de75079f0e43cf3818b38ca8e55b256e01ae175f\": container with ID starting with 09fb0ef0fedafbf5ec4f80d7de75079f0e43cf3818b38ca8e55b256e01ae175f not found: ID does not exist" containerID="09fb0ef0fedafbf5ec4f80d7de75079f0e43cf3818b38ca8e55b256e01ae175f" Jan 21 19:01:05 crc kubenswrapper[4799]: I0121 19:01:05.490684 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09fb0ef0fedafbf5ec4f80d7de75079f0e43cf3818b38ca8e55b256e01ae175f"} err="failed to get container status \"09fb0ef0fedafbf5ec4f80d7de75079f0e43cf3818b38ca8e55b256e01ae175f\": rpc error: code = NotFound desc = could not find container \"09fb0ef0fedafbf5ec4f80d7de75079f0e43cf3818b38ca8e55b256e01ae175f\": container with ID starting with 09fb0ef0fedafbf5ec4f80d7de75079f0e43cf3818b38ca8e55b256e01ae175f not found: ID does not exist" Jan 21 19:01:06 crc kubenswrapper[4799]: I0121 19:01:06.222194 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf" path="/var/lib/kubelet/pods/6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf/volumes" Jan 21 19:01:06 crc kubenswrapper[4799]: I0121 19:01:06.782068 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29483701-6vvv2" Jan 21 19:01:06 crc kubenswrapper[4799]: I0121 19:01:06.940922 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-fernet-keys\") pod \"7295fe64-03cb-4766-934e-cbd3eace9c00\" (UID: \"7295fe64-03cb-4766-934e-cbd3eace9c00\") " Jan 21 19:01:06 crc kubenswrapper[4799]: I0121 19:01:06.941096 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-config-data\") pod \"7295fe64-03cb-4766-934e-cbd3eace9c00\" (UID: \"7295fe64-03cb-4766-934e-cbd3eace9c00\") " Jan 21 19:01:06 crc kubenswrapper[4799]: I0121 19:01:06.941205 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-combined-ca-bundle\") pod \"7295fe64-03cb-4766-934e-cbd3eace9c00\" (UID: \"7295fe64-03cb-4766-934e-cbd3eace9c00\") " Jan 21 19:01:06 crc kubenswrapper[4799]: I0121 19:01:06.941258 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxqsh\" (UniqueName: \"kubernetes.io/projected/7295fe64-03cb-4766-934e-cbd3eace9c00-kube-api-access-pxqsh\") pod \"7295fe64-03cb-4766-934e-cbd3eace9c00\" (UID: \"7295fe64-03cb-4766-934e-cbd3eace9c00\") " Jan 21 19:01:06 crc kubenswrapper[4799]: I0121 19:01:06.949308 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "7295fe64-03cb-4766-934e-cbd3eace9c00" (UID: "7295fe64-03cb-4766-934e-cbd3eace9c00"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 19:01:06 crc kubenswrapper[4799]: I0121 19:01:06.971191 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7295fe64-03cb-4766-934e-cbd3eace9c00-kube-api-access-pxqsh" (OuterVolumeSpecName: "kube-api-access-pxqsh") pod "7295fe64-03cb-4766-934e-cbd3eace9c00" (UID: "7295fe64-03cb-4766-934e-cbd3eace9c00"). InnerVolumeSpecName "kube-api-access-pxqsh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:01:06 crc kubenswrapper[4799]: I0121 19:01:06.977428 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7295fe64-03cb-4766-934e-cbd3eace9c00" (UID: "7295fe64-03cb-4766-934e-cbd3eace9c00"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 19:01:07 crc kubenswrapper[4799]: I0121 19:01:07.009385 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-config-data" (OuterVolumeSpecName: "config-data") pod "7295fe64-03cb-4766-934e-cbd3eace9c00" (UID: "7295fe64-03cb-4766-934e-cbd3eace9c00"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 19:01:07 crc kubenswrapper[4799]: I0121 19:01:07.045560 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxqsh\" (UniqueName: \"kubernetes.io/projected/7295fe64-03cb-4766-934e-cbd3eace9c00-kube-api-access-pxqsh\") on node \"crc\" DevicePath \"\"" Jan 21 19:01:07 crc kubenswrapper[4799]: I0121 19:01:07.045609 4799 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 21 19:01:07 crc kubenswrapper[4799]: I0121 19:01:07.045623 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 19:01:07 crc kubenswrapper[4799]: I0121 19:01:07.045635 4799 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7295fe64-03cb-4766-934e-cbd3eace9c00-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 21 19:01:07 crc kubenswrapper[4799]: I0121 19:01:07.358541 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29483701-6vvv2" event={"ID":"7295fe64-03cb-4766-934e-cbd3eace9c00","Type":"ContainerDied","Data":"d7ef414d3aa4eb7ea8b6eef042b014523b5782a98f64bcf6421f24b6e899d4f0"} Jan 21 19:01:07 crc kubenswrapper[4799]: I0121 19:01:07.358826 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7ef414d3aa4eb7ea8b6eef042b014523b5782a98f64bcf6421f24b6e899d4f0" Jan 21 19:01:07 crc kubenswrapper[4799]: I0121 19:01:07.358622 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29483701-6vvv2" Jan 21 19:01:25 crc kubenswrapper[4799]: I0121 19:01:25.971397 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 19:01:25 crc kubenswrapper[4799]: I0121 19:01:25.971997 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 19:01:25 crc kubenswrapper[4799]: I0121 19:01:25.972069 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 19:01:25 crc kubenswrapper[4799]: I0121 19:01:25.973056 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 19:01:25 crc kubenswrapper[4799]: I0121 19:01:25.973143 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" 
containerID="cri-o://9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" gracePeriod=600 Jan 21 19:01:26 crc kubenswrapper[4799]: E0121 19:01:26.107172 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:01:26 crc kubenswrapper[4799]: I0121 19:01:26.583307 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" exitCode=0 Jan 21 19:01:26 crc kubenswrapper[4799]: I0121 19:01:26.583360 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794"} Jan 21 19:01:26 crc kubenswrapper[4799]: I0121 19:01:26.583398 4799 scope.go:117] "RemoveContainer" containerID="ea14aa4ba04a0d2455a9611b11a13b3eb56975233f138e469cb13f8c18deacd0" Jan 21 19:01:26 crc kubenswrapper[4799]: I0121 19:01:26.584297 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:01:26 crc kubenswrapper[4799]: E0121 19:01:26.584660 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:01:41 crc kubenswrapper[4799]: I0121 19:01:41.205811 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:01:41 crc kubenswrapper[4799]: E0121 19:01:41.206800 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:01:55 crc kubenswrapper[4799]: I0121 19:01:55.205538 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:01:55 crc kubenswrapper[4799]: E0121 19:01:55.206453 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:02:06 crc kubenswrapper[4799]: I0121 19:02:06.205878 4799 scope.go:117] "RemoveContainer" 
containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:02:06 crc kubenswrapper[4799]: E0121 19:02:06.206594 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:02:17 crc kubenswrapper[4799]: I0121 19:02:17.205601 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:02:17 crc kubenswrapper[4799]: E0121 19:02:17.206594 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.151433 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xt9qj"] Jan 21 19:02:25 crc kubenswrapper[4799]: E0121 19:02:25.152443 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7295fe64-03cb-4766-934e-cbd3eace9c00" containerName="keystone-cron" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.152459 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="7295fe64-03cb-4766-934e-cbd3eace9c00" containerName="keystone-cron" Jan 21 19:02:25 crc kubenswrapper[4799]: E0121 19:02:25.152474 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf" containerName="extract-content" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.152479 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf" containerName="extract-content" Jan 21 19:02:25 crc kubenswrapper[4799]: E0121 19:02:25.152491 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf" containerName="extract-utilities" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.152497 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf" containerName="extract-utilities" Jan 21 19:02:25 crc kubenswrapper[4799]: E0121 19:02:25.152513 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf" containerName="registry-server" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.152519 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf" containerName="registry-server" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.152732 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bbe69e0-1366-4b22-8e99-ff52bf9a3cbf" containerName="registry-server" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.152758 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="7295fe64-03cb-4766-934e-cbd3eace9c00" containerName="keystone-cron" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.154322 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.172147 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xt9qj"] Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.288640 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c805b21e-1e30-4607-89a6-22340a73eadc-catalog-content\") pod \"redhat-operators-xt9qj\" (UID: \"c805b21e-1e30-4607-89a6-22340a73eadc\") " pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.288771 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxlnp\" (UniqueName: \"kubernetes.io/projected/c805b21e-1e30-4607-89a6-22340a73eadc-kube-api-access-rxlnp\") pod \"redhat-operators-xt9qj\" (UID: \"c805b21e-1e30-4607-89a6-22340a73eadc\") " pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.288860 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c805b21e-1e30-4607-89a6-22340a73eadc-utilities\") pod \"redhat-operators-xt9qj\" (UID: \"c805b21e-1e30-4607-89a6-22340a73eadc\") " pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.390769 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c805b21e-1e30-4607-89a6-22340a73eadc-catalog-content\") pod \"redhat-operators-xt9qj\" (UID: \"c805b21e-1e30-4607-89a6-22340a73eadc\") " pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.390852 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxlnp\" (UniqueName: \"kubernetes.io/projected/c805b21e-1e30-4607-89a6-22340a73eadc-kube-api-access-rxlnp\") pod \"redhat-operators-xt9qj\" (UID: \"c805b21e-1e30-4607-89a6-22340a73eadc\") " pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.390933 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c805b21e-1e30-4607-89a6-22340a73eadc-utilities\") pod \"redhat-operators-xt9qj\" (UID: \"c805b21e-1e30-4607-89a6-22340a73eadc\") " pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.391340 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c805b21e-1e30-4607-89a6-22340a73eadc-catalog-content\") pod \"redhat-operators-xt9qj\" (UID: \"c805b21e-1e30-4607-89a6-22340a73eadc\") " pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.391352 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c805b21e-1e30-4607-89a6-22340a73eadc-utilities\") pod \"redhat-operators-xt9qj\" (UID: \"c805b21e-1e30-4607-89a6-22340a73eadc\") " pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.419051 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-rxlnp\" (UniqueName: \"kubernetes.io/projected/c805b21e-1e30-4607-89a6-22340a73eadc-kube-api-access-rxlnp\") pod \"redhat-operators-xt9qj\" (UID: \"c805b21e-1e30-4607-89a6-22340a73eadc\") " pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:25 crc kubenswrapper[4799]: I0121 19:02:25.479173 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:26 crc kubenswrapper[4799]: I0121 19:02:26.026954 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xt9qj"] Jan 21 19:02:26 crc kubenswrapper[4799]: I0121 19:02:26.201722 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xt9qj" event={"ID":"c805b21e-1e30-4607-89a6-22340a73eadc","Type":"ContainerStarted","Data":"c9909231046649356b44d9d5bc86edd6eeb7b7aa7fb61c88bd7f1b75145d782c"} Jan 21 19:02:27 crc kubenswrapper[4799]: I0121 19:02:27.216100 4799 generic.go:334] "Generic (PLEG): container finished" podID="c805b21e-1e30-4607-89a6-22340a73eadc" containerID="e3fbc1b9f483c143a9867b500652c3ac60566c747cb5529892d6dae2bab56e02" exitCode=0 Jan 21 19:02:27 crc kubenswrapper[4799]: I0121 19:02:27.216180 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xt9qj" event={"ID":"c805b21e-1e30-4607-89a6-22340a73eadc","Type":"ContainerDied","Data":"e3fbc1b9f483c143a9867b500652c3ac60566c747cb5529892d6dae2bab56e02"} Jan 21 19:02:29 crc kubenswrapper[4799]: I0121 19:02:29.206534 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:02:29 crc kubenswrapper[4799]: E0121 19:02:29.208210 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:02:29 crc kubenswrapper[4799]: I0121 19:02:29.250906 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xt9qj" event={"ID":"c805b21e-1e30-4607-89a6-22340a73eadc","Type":"ContainerStarted","Data":"5e81d040aad3cfac312b588b004d4d096ebac382b15466a350d3019246e38201"} Jan 21 19:02:32 crc kubenswrapper[4799]: I0121 19:02:32.288479 4799 generic.go:334] "Generic (PLEG): container finished" podID="c805b21e-1e30-4607-89a6-22340a73eadc" containerID="5e81d040aad3cfac312b588b004d4d096ebac382b15466a350d3019246e38201" exitCode=0 Jan 21 19:02:32 crc kubenswrapper[4799]: I0121 19:02:32.288561 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xt9qj" event={"ID":"c805b21e-1e30-4607-89a6-22340a73eadc","Type":"ContainerDied","Data":"5e81d040aad3cfac312b588b004d4d096ebac382b15466a350d3019246e38201"} Jan 21 19:02:33 crc kubenswrapper[4799]: I0121 19:02:33.299614 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xt9qj" event={"ID":"c805b21e-1e30-4607-89a6-22340a73eadc","Type":"ContainerStarted","Data":"d29f02cc63a14cafe2dcc053df92a0638a9a39801081ac69f358374df4b3c6ee"} Jan 21 19:02:33 crc kubenswrapper[4799]: I0121 19:02:33.333529 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-xt9qj" podStartSLOduration=2.805895892 podStartE2EDuration="8.333509833s" podCreationTimestamp="2026-01-21 19:02:25 +0000 UTC" firstStartedPulling="2026-01-21 19:02:27.222459842 +0000 UTC m=+5373.848749865" lastFinishedPulling="2026-01-21 19:02:32.750073773 +0000 UTC m=+5379.376363806" observedRunningTime="2026-01-21 19:02:33.325264109 +0000 UTC m=+5379.951554142" watchObservedRunningTime="2026-01-21 19:02:33.333509833 +0000 UTC m=+5379.959799846" Jan 21 19:02:35 crc kubenswrapper[4799]: I0121 19:02:35.480084 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:35 crc kubenswrapper[4799]: I0121 19:02:35.480731 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:36 crc kubenswrapper[4799]: I0121 19:02:36.529876 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xt9qj" podUID="c805b21e-1e30-4607-89a6-22340a73eadc" containerName="registry-server" probeResult="failure" output=< Jan 21 19:02:36 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Jan 21 19:02:36 crc kubenswrapper[4799]: > Jan 21 19:02:43 crc kubenswrapper[4799]: I0121 19:02:43.205537 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:02:43 crc kubenswrapper[4799]: E0121 19:02:43.206371 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:02:45 crc kubenswrapper[4799]: I0121 19:02:45.745848 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:45 crc kubenswrapper[4799]: I0121 19:02:45.811463 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:45 crc kubenswrapper[4799]: I0121 19:02:45.986988 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xt9qj"] Jan 21 19:02:47 crc kubenswrapper[4799]: I0121 19:02:47.455464 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xt9qj" podUID="c805b21e-1e30-4607-89a6-22340a73eadc" containerName="registry-server" containerID="cri-o://d29f02cc63a14cafe2dcc053df92a0638a9a39801081ac69f358374df4b3c6ee" gracePeriod=2 Jan 21 19:02:47 crc kubenswrapper[4799]: I0121 19:02:47.959162 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.115990 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c805b21e-1e30-4607-89a6-22340a73eadc-catalog-content\") pod \"c805b21e-1e30-4607-89a6-22340a73eadc\" (UID: \"c805b21e-1e30-4607-89a6-22340a73eadc\") " Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.116680 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxlnp\" (UniqueName: \"kubernetes.io/projected/c805b21e-1e30-4607-89a6-22340a73eadc-kube-api-access-rxlnp\") pod \"c805b21e-1e30-4607-89a6-22340a73eadc\" (UID: \"c805b21e-1e30-4607-89a6-22340a73eadc\") " Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.116867 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c805b21e-1e30-4607-89a6-22340a73eadc-utilities\") pod \"c805b21e-1e30-4607-89a6-22340a73eadc\" (UID: \"c805b21e-1e30-4607-89a6-22340a73eadc\") " Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.117601 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c805b21e-1e30-4607-89a6-22340a73eadc-utilities" (OuterVolumeSpecName: "utilities") pod "c805b21e-1e30-4607-89a6-22340a73eadc" (UID: "c805b21e-1e30-4607-89a6-22340a73eadc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.118741 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c805b21e-1e30-4607-89a6-22340a73eadc-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.137107 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c805b21e-1e30-4607-89a6-22340a73eadc-kube-api-access-rxlnp" (OuterVolumeSpecName: "kube-api-access-rxlnp") pod "c805b21e-1e30-4607-89a6-22340a73eadc" (UID: "c805b21e-1e30-4607-89a6-22340a73eadc"). InnerVolumeSpecName "kube-api-access-rxlnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.226986 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxlnp\" (UniqueName: \"kubernetes.io/projected/c805b21e-1e30-4607-89a6-22340a73eadc-kube-api-access-rxlnp\") on node \"crc\" DevicePath \"\"" Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.248743 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c805b21e-1e30-4607-89a6-22340a73eadc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c805b21e-1e30-4607-89a6-22340a73eadc" (UID: "c805b21e-1e30-4607-89a6-22340a73eadc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.329925 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c805b21e-1e30-4607-89a6-22340a73eadc-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.476236 4799 generic.go:334] "Generic (PLEG): container finished" podID="c805b21e-1e30-4607-89a6-22340a73eadc" containerID="d29f02cc63a14cafe2dcc053df92a0638a9a39801081ac69f358374df4b3c6ee" exitCode=0 Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.476292 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xt9qj" event={"ID":"c805b21e-1e30-4607-89a6-22340a73eadc","Type":"ContainerDied","Data":"d29f02cc63a14cafe2dcc053df92a0638a9a39801081ac69f358374df4b3c6ee"} Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.476342 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xt9qj" event={"ID":"c805b21e-1e30-4607-89a6-22340a73eadc","Type":"ContainerDied","Data":"c9909231046649356b44d9d5bc86edd6eeb7b7aa7fb61c88bd7f1b75145d782c"} Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.476377 4799 scope.go:117] "RemoveContainer" containerID="d29f02cc63a14cafe2dcc053df92a0638a9a39801081ac69f358374df4b3c6ee" Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.477417 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xt9qj" Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.526381 4799 scope.go:117] "RemoveContainer" containerID="5e81d040aad3cfac312b588b004d4d096ebac382b15466a350d3019246e38201" Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.540363 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xt9qj"] Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.549036 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xt9qj"] Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.557580 4799 scope.go:117] "RemoveContainer" containerID="e3fbc1b9f483c143a9867b500652c3ac60566c747cb5529892d6dae2bab56e02" Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.605966 4799 scope.go:117] "RemoveContainer" containerID="d29f02cc63a14cafe2dcc053df92a0638a9a39801081ac69f358374df4b3c6ee" Jan 21 19:02:48 crc kubenswrapper[4799]: E0121 19:02:48.606637 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d29f02cc63a14cafe2dcc053df92a0638a9a39801081ac69f358374df4b3c6ee\": container with ID starting with d29f02cc63a14cafe2dcc053df92a0638a9a39801081ac69f358374df4b3c6ee not found: ID does not exist" containerID="d29f02cc63a14cafe2dcc053df92a0638a9a39801081ac69f358374df4b3c6ee" Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.606693 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d29f02cc63a14cafe2dcc053df92a0638a9a39801081ac69f358374df4b3c6ee"} err="failed to get container status \"d29f02cc63a14cafe2dcc053df92a0638a9a39801081ac69f358374df4b3c6ee\": rpc error: code = NotFound desc = could not find container \"d29f02cc63a14cafe2dcc053df92a0638a9a39801081ac69f358374df4b3c6ee\": container with ID starting with d29f02cc63a14cafe2dcc053df92a0638a9a39801081ac69f358374df4b3c6ee not found: ID does not exist" Jan 21 19:02:48 crc 
Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.606730 4799 scope.go:117] "RemoveContainer" containerID="5e81d040aad3cfac312b588b004d4d096ebac382b15466a350d3019246e38201"
Jan 21 19:02:48 crc kubenswrapper[4799]: E0121 19:02:48.607473 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e81d040aad3cfac312b588b004d4d096ebac382b15466a350d3019246e38201\": container with ID starting with 5e81d040aad3cfac312b588b004d4d096ebac382b15466a350d3019246e38201 not found: ID does not exist" containerID="5e81d040aad3cfac312b588b004d4d096ebac382b15466a350d3019246e38201"
Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.607614 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e81d040aad3cfac312b588b004d4d096ebac382b15466a350d3019246e38201"} err="failed to get container status \"5e81d040aad3cfac312b588b004d4d096ebac382b15466a350d3019246e38201\": rpc error: code = NotFound desc = could not find container \"5e81d040aad3cfac312b588b004d4d096ebac382b15466a350d3019246e38201\": container with ID starting with 5e81d040aad3cfac312b588b004d4d096ebac382b15466a350d3019246e38201 not found: ID does not exist"
Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.607646 4799 scope.go:117] "RemoveContainer" containerID="e3fbc1b9f483c143a9867b500652c3ac60566c747cb5529892d6dae2bab56e02"
Jan 21 19:02:48 crc kubenswrapper[4799]: E0121 19:02:48.608003 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3fbc1b9f483c143a9867b500652c3ac60566c747cb5529892d6dae2bab56e02\": container with ID starting with e3fbc1b9f483c143a9867b500652c3ac60566c747cb5529892d6dae2bab56e02 not found: ID does not exist" containerID="e3fbc1b9f483c143a9867b500652c3ac60566c747cb5529892d6dae2bab56e02"
Jan 21 19:02:48 crc kubenswrapper[4799]: I0121 19:02:48.608035 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3fbc1b9f483c143a9867b500652c3ac60566c747cb5529892d6dae2bab56e02"} err="failed to get container status \"e3fbc1b9f483c143a9867b500652c3ac60566c747cb5529892d6dae2bab56e02\": rpc error: code = NotFound desc = could not find container \"e3fbc1b9f483c143a9867b500652c3ac60566c747cb5529892d6dae2bab56e02\": container with ID starting with e3fbc1b9f483c143a9867b500652c3ac60566c747cb5529892d6dae2bab56e02 not found: ID does not exist"
Jan 21 19:02:50 crc kubenswrapper[4799]: I0121 19:02:50.216578 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c805b21e-1e30-4607-89a6-22340a73eadc" path="/var/lib/kubelet/pods/c805b21e-1e30-4607-89a6-22340a73eadc/volumes"
Jan 21 19:02:54 crc kubenswrapper[4799]: I0121 19:02:54.213324 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794"
Jan 21 19:02:54 crc kubenswrapper[4799]: E0121 19:02:54.214126 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 19:03:06 crc kubenswrapper[4799]: I0121 19:03:06.205693 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794"
Jan 21 19:03:06 crc kubenswrapper[4799]: E0121 19:03:06.206808 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 19:03:18 crc kubenswrapper[4799]: I0121 19:03:18.205782 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794"
Jan 21 19:03:18 crc kubenswrapper[4799]: E0121 19:03:18.206694 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 19:03:32 crc kubenswrapper[4799]: I0121 19:03:32.205763 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794"
Jan 21 19:03:32 crc kubenswrapper[4799]: E0121 19:03:32.206638 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 19:03:45 crc kubenswrapper[4799]: I0121 19:03:45.205792 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794"
Jan 21 19:03:45 crc kubenswrapper[4799]: E0121 19:03:45.206635 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 19:03:58 crc kubenswrapper[4799]: I0121 19:03:58.206319 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794"
Jan 21 19:03:58 crc kubenswrapper[4799]: E0121 19:03:58.207253 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 19:04:11 crc kubenswrapper[4799]: I0121 19:04:11.205813 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794"
Jan 21 19:04:11 crc kubenswrapper[4799]: E0121 19:04:11.206775 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:04:25 crc kubenswrapper[4799]: I0121 19:04:25.205667 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:04:25 crc kubenswrapper[4799]: E0121 19:04:25.206434 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:04:39 crc kubenswrapper[4799]: I0121 19:04:39.205385 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:04:39 crc kubenswrapper[4799]: E0121 19:04:39.206328 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:04:54 crc kubenswrapper[4799]: I0121 19:04:54.241390 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:04:54 crc kubenswrapper[4799]: E0121 19:04:54.242464 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:04:59 crc kubenswrapper[4799]: I0121 19:04:59.064202 4799 generic.go:334] "Generic (PLEG): container finished" podID="384bc0b0-0caa-45e3-b892-155def4ed881" containerID="4e1f24192c5c16b509a03af3a966e0e1f354e83f7aca4b01ba0c6702b84caed0" exitCode=0 Jan 21 19:04:59 crc kubenswrapper[4799]: I0121 19:04:59.064275 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"384bc0b0-0caa-45e3-b892-155def4ed881","Type":"ContainerDied","Data":"4e1f24192c5c16b509a03af3a966e0e1f354e83f7aca4b01ba0c6702b84caed0"} Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.465348 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.589513 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-ssh-key\") pod \"384bc0b0-0caa-45e3-b892-155def4ed881\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.589604 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/384bc0b0-0caa-45e3-b892-155def4ed881-config-data\") pod \"384bc0b0-0caa-45e3-b892-155def4ed881\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.589644 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/384bc0b0-0caa-45e3-b892-155def4ed881-test-operator-ephemeral-workdir\") pod \"384bc0b0-0caa-45e3-b892-155def4ed881\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.589704 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-openstack-config-secret\") pod \"384bc0b0-0caa-45e3-b892-155def4ed881\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.589786 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"384bc0b0-0caa-45e3-b892-155def4ed881\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.589815 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/384bc0b0-0caa-45e3-b892-155def4ed881-test-operator-ephemeral-temporary\") pod \"384bc0b0-0caa-45e3-b892-155def4ed881\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.589882 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/384bc0b0-0caa-45e3-b892-155def4ed881-openstack-config\") pod \"384bc0b0-0caa-45e3-b892-155def4ed881\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.591116 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/384bc0b0-0caa-45e3-b892-155def4ed881-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "384bc0b0-0caa-45e3-b892-155def4ed881" (UID: "384bc0b0-0caa-45e3-b892-155def4ed881"). InnerVolumeSpecName "test-operator-ephemeral-temporary". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.591265 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-ca-certs\") pod \"384bc0b0-0caa-45e3-b892-155def4ed881\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.591334 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkhww\" (UniqueName: \"kubernetes.io/projected/384bc0b0-0caa-45e3-b892-155def4ed881-kube-api-access-xkhww\") pod \"384bc0b0-0caa-45e3-b892-155def4ed881\" (UID: \"384bc0b0-0caa-45e3-b892-155def4ed881\") " Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.591502 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/384bc0b0-0caa-45e3-b892-155def4ed881-config-data" (OuterVolumeSpecName: "config-data") pod "384bc0b0-0caa-45e3-b892-155def4ed881" (UID: "384bc0b0-0caa-45e3-b892-155def4ed881"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.592536 4799 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/384bc0b0-0caa-45e3-b892-155def4ed881-config-data\") on node \"crc\" DevicePath \"\"" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.592568 4799 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/384bc0b0-0caa-45e3-b892-155def4ed881-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.599551 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/384bc0b0-0caa-45e3-b892-155def4ed881-kube-api-access-xkhww" (OuterVolumeSpecName: "kube-api-access-xkhww") pod "384bc0b0-0caa-45e3-b892-155def4ed881" (UID: "384bc0b0-0caa-45e3-b892-155def4ed881"). InnerVolumeSpecName "kube-api-access-xkhww". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.600281 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/384bc0b0-0caa-45e3-b892-155def4ed881-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "384bc0b0-0caa-45e3-b892-155def4ed881" (UID: "384bc0b0-0caa-45e3-b892-155def4ed881"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.605049 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "test-operator-logs") pod "384bc0b0-0caa-45e3-b892-155def4ed881" (UID: "384bc0b0-0caa-45e3-b892-155def4ed881"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.624518 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "384bc0b0-0caa-45e3-b892-155def4ed881" (UID: "384bc0b0-0caa-45e3-b892-155def4ed881"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.625222 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "384bc0b0-0caa-45e3-b892-155def4ed881" (UID: "384bc0b0-0caa-45e3-b892-155def4ed881"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.642161 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "384bc0b0-0caa-45e3-b892-155def4ed881" (UID: "384bc0b0-0caa-45e3-b892-155def4ed881"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.662916 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/384bc0b0-0caa-45e3-b892-155def4ed881-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "384bc0b0-0caa-45e3-b892-155def4ed881" (UID: "384bc0b0-0caa-45e3-b892-155def4ed881"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.694643 4799 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-ssh-key\") on node \"crc\" DevicePath \"\"" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.694686 4799 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/384bc0b0-0caa-45e3-b892-155def4ed881-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.694708 4799 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.694763 4799 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.694778 4799 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/384bc0b0-0caa-45e3-b892-155def4ed881-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.694791 4799 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/384bc0b0-0caa-45e3-b892-155def4ed881-ca-certs\") on node \"crc\" DevicePath \"\"" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.694805 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkhww\" (UniqueName: \"kubernetes.io/projected/384bc0b0-0caa-45e3-b892-155def4ed881-kube-api-access-xkhww\") on node \"crc\" DevicePath \"\"" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 19:05:00.720896 4799 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 21 19:05:00 crc kubenswrapper[4799]: I0121 
19:05:00.798025 4799 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 21 19:05:01 crc kubenswrapper[4799]: I0121 19:05:01.088932 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"384bc0b0-0caa-45e3-b892-155def4ed881","Type":"ContainerDied","Data":"001098c4d5fb85163635be7b8aa48e9ba2cfc4ff50b6ce93cc6bbe508fb6d6c8"} Jan 21 19:05:01 crc kubenswrapper[4799]: I0121 19:05:01.089271 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="001098c4d5fb85163635be7b8aa48e9ba2cfc4ff50b6ce93cc6bbe508fb6d6c8" Jan 21 19:05:01 crc kubenswrapper[4799]: I0121 19:05:01.089037 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.675892 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Jan 21 19:05:03 crc kubenswrapper[4799]: E0121 19:05:03.676452 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c805b21e-1e30-4607-89a6-22340a73eadc" containerName="extract-content" Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.676469 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c805b21e-1e30-4607-89a6-22340a73eadc" containerName="extract-content" Jan 21 19:05:03 crc kubenswrapper[4799]: E0121 19:05:03.676510 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c805b21e-1e30-4607-89a6-22340a73eadc" containerName="registry-server" Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.676519 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c805b21e-1e30-4607-89a6-22340a73eadc" containerName="registry-server" Jan 21 19:05:03 crc kubenswrapper[4799]: E0121 19:05:03.676538 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="384bc0b0-0caa-45e3-b892-155def4ed881" containerName="tempest-tests-tempest-tests-runner" Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.676547 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="384bc0b0-0caa-45e3-b892-155def4ed881" containerName="tempest-tests-tempest-tests-runner" Jan 21 19:05:03 crc kubenswrapper[4799]: E0121 19:05:03.676564 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c805b21e-1e30-4607-89a6-22340a73eadc" containerName="extract-utilities" Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.676573 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c805b21e-1e30-4607-89a6-22340a73eadc" containerName="extract-utilities" Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.676847 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="384bc0b0-0caa-45e3-b892-155def4ed881" containerName="tempest-tests-tempest-tests-runner" Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.676873 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c805b21e-1e30-4607-89a6-22340a73eadc" containerName="registry-server" Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.677934 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.685797 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-jqx6b" Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.698560 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.774794 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqmbg\" (UniqueName: \"kubernetes.io/projected/254949e9-614e-419a-ba47-42bf8850d001-kube-api-access-zqmbg\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"254949e9-614e-419a-ba47-42bf8850d001\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.775012 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"254949e9-614e-419a-ba47-42bf8850d001\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.876933 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"254949e9-614e-419a-ba47-42bf8850d001\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.877085 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqmbg\" (UniqueName: \"kubernetes.io/projected/254949e9-614e-419a-ba47-42bf8850d001-kube-api-access-zqmbg\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"254949e9-614e-419a-ba47-42bf8850d001\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.878047 4799 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"254949e9-614e-419a-ba47-42bf8850d001\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.906590 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqmbg\" (UniqueName: \"kubernetes.io/projected/254949e9-614e-419a-ba47-42bf8850d001-kube-api-access-zqmbg\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"254949e9-614e-419a-ba47-42bf8850d001\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 21 19:05:03 crc kubenswrapper[4799]: I0121 19:05:03.930461 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"254949e9-614e-419a-ba47-42bf8850d001\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 21 19:05:04 crc 
kubenswrapper[4799]: I0121 19:05:04.011436 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Jan 21 19:05:04 crc kubenswrapper[4799]: I0121 19:05:04.504708 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Jan 21 19:05:04 crc kubenswrapper[4799]: I0121 19:05:04.512924 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 19:05:05 crc kubenswrapper[4799]: I0121 19:05:05.132818 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"254949e9-614e-419a-ba47-42bf8850d001","Type":"ContainerStarted","Data":"e4d5a800d9a9df560fdd97b063b4328f517f94dca89f75ff131323ae40e70576"} Jan 21 19:05:06 crc kubenswrapper[4799]: I0121 19:05:06.146344 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"254949e9-614e-419a-ba47-42bf8850d001","Type":"ContainerStarted","Data":"36ab29c33ed8c93a08baf8dde805cb2605a286d20240e3df2747ca8ffed51d94"} Jan 21 19:05:06 crc kubenswrapper[4799]: I0121 19:05:06.170857 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.362217261 podStartE2EDuration="3.170835419s" podCreationTimestamp="2026-01-21 19:05:03 +0000 UTC" firstStartedPulling="2026-01-21 19:05:04.509672509 +0000 UTC m=+5531.135962572" lastFinishedPulling="2026-01-21 19:05:05.318290677 +0000 UTC m=+5531.944580730" observedRunningTime="2026-01-21 19:05:06.164271303 +0000 UTC m=+5532.790561346" watchObservedRunningTime="2026-01-21 19:05:06.170835419 +0000 UTC m=+5532.797125462" Jan 21 19:05:08 crc kubenswrapper[4799]: I0121 19:05:08.218352 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:05:08 crc kubenswrapper[4799]: E0121 19:05:08.218956 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:05:21 crc kubenswrapper[4799]: I0121 19:05:21.205474 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:05:21 crc kubenswrapper[4799]: E0121 19:05:21.206367 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:05:31 crc kubenswrapper[4799]: I0121 19:05:31.838117 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-24c4t/must-gather-tr5nf"] Jan 21 19:05:31 crc kubenswrapper[4799]: I0121 19:05:31.840624 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-24c4t/must-gather-tr5nf" Jan 21 19:05:31 crc kubenswrapper[4799]: I0121 19:05:31.843044 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-24c4t"/"openshift-service-ca.crt" Jan 21 19:05:31 crc kubenswrapper[4799]: I0121 19:05:31.843854 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-24c4t"/"kube-root-ca.crt" Jan 21 19:05:31 crc kubenswrapper[4799]: I0121 19:05:31.843895 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-24c4t"/"default-dockercfg-x7r8n" Jan 21 19:05:31 crc kubenswrapper[4799]: I0121 19:05:31.850370 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-24c4t/must-gather-tr5nf"] Jan 21 19:05:31 crc kubenswrapper[4799]: I0121 19:05:31.957400 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b3649db7-cfc4-45b4-9638-40680f5fd784-must-gather-output\") pod \"must-gather-tr5nf\" (UID: \"b3649db7-cfc4-45b4-9638-40680f5fd784\") " pod="openshift-must-gather-24c4t/must-gather-tr5nf" Jan 21 19:05:31 crc kubenswrapper[4799]: I0121 19:05:31.957812 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgc5d\" (UniqueName: \"kubernetes.io/projected/b3649db7-cfc4-45b4-9638-40680f5fd784-kube-api-access-hgc5d\") pod \"must-gather-tr5nf\" (UID: \"b3649db7-cfc4-45b4-9638-40680f5fd784\") " pod="openshift-must-gather-24c4t/must-gather-tr5nf" Jan 21 19:05:32 crc kubenswrapper[4799]: I0121 19:05:32.060084 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgc5d\" (UniqueName: \"kubernetes.io/projected/b3649db7-cfc4-45b4-9638-40680f5fd784-kube-api-access-hgc5d\") pod \"must-gather-tr5nf\" (UID: \"b3649db7-cfc4-45b4-9638-40680f5fd784\") " pod="openshift-must-gather-24c4t/must-gather-tr5nf" Jan 21 19:05:32 crc kubenswrapper[4799]: I0121 19:05:32.060337 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b3649db7-cfc4-45b4-9638-40680f5fd784-must-gather-output\") pod \"must-gather-tr5nf\" (UID: \"b3649db7-cfc4-45b4-9638-40680f5fd784\") " pod="openshift-must-gather-24c4t/must-gather-tr5nf" Jan 21 19:05:32 crc kubenswrapper[4799]: I0121 19:05:32.060854 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b3649db7-cfc4-45b4-9638-40680f5fd784-must-gather-output\") pod \"must-gather-tr5nf\" (UID: \"b3649db7-cfc4-45b4-9638-40680f5fd784\") " pod="openshift-must-gather-24c4t/must-gather-tr5nf" Jan 21 19:05:32 crc kubenswrapper[4799]: I0121 19:05:32.079265 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgc5d\" (UniqueName: \"kubernetes.io/projected/b3649db7-cfc4-45b4-9638-40680f5fd784-kube-api-access-hgc5d\") pod \"must-gather-tr5nf\" (UID: \"b3649db7-cfc4-45b4-9638-40680f5fd784\") " pod="openshift-must-gather-24c4t/must-gather-tr5nf" Jan 21 19:05:32 crc kubenswrapper[4799]: I0121 19:05:32.186779 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-24c4t/must-gather-tr5nf" Jan 21 19:05:32 crc kubenswrapper[4799]: I0121 19:05:32.706261 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-24c4t/must-gather-tr5nf"] Jan 21 19:05:33 crc kubenswrapper[4799]: I0121 19:05:33.474302 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-24c4t/must-gather-tr5nf" event={"ID":"b3649db7-cfc4-45b4-9638-40680f5fd784","Type":"ContainerStarted","Data":"25150eb0d4a35b3a3caa568683794c8c99c35a150d96291b8667ce708b0057dc"} Jan 21 19:05:35 crc kubenswrapper[4799]: I0121 19:05:35.205554 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:05:35 crc kubenswrapper[4799]: E0121 19:05:35.206240 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:05:39 crc kubenswrapper[4799]: I0121 19:05:39.541574 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-24c4t/must-gather-tr5nf" event={"ID":"b3649db7-cfc4-45b4-9638-40680f5fd784","Type":"ContainerStarted","Data":"d66c3303a11042d90bdf95c644cadb60f4b22ab517a0eaa4a5b9211263828eb6"} Jan 21 19:05:39 crc kubenswrapper[4799]: I0121 19:05:39.542120 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-24c4t/must-gather-tr5nf" event={"ID":"b3649db7-cfc4-45b4-9638-40680f5fd784","Type":"ContainerStarted","Data":"b4d0b5dc12e1be500a4eab5179819025aa601aaae8ff889ef9facb638d91e619"} Jan 21 19:05:39 crc kubenswrapper[4799]: I0121 19:05:39.564588 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-24c4t/must-gather-tr5nf" podStartSLOduration=2.605462738 podStartE2EDuration="8.564558694s" podCreationTimestamp="2026-01-21 19:05:31 +0000 UTC" firstStartedPulling="2026-01-21 19:05:32.709887328 +0000 UTC m=+5559.336177351" lastFinishedPulling="2026-01-21 19:05:38.668983264 +0000 UTC m=+5565.295273307" observedRunningTime="2026-01-21 19:05:39.555391145 +0000 UTC m=+5566.181681168" watchObservedRunningTime="2026-01-21 19:05:39.564558694 +0000 UTC m=+5566.190848727" Jan 21 19:05:43 crc kubenswrapper[4799]: I0121 19:05:43.785875 4799 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="04f9c729-36bb-4aa5-9060-af5b0666b196" containerName="galera" probeResult="failure" output="command timed out" Jan 21 19:05:44 crc kubenswrapper[4799]: I0121 19:05:44.148447 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-24c4t/crc-debug-pjwsf"] Jan 21 19:05:44 crc kubenswrapper[4799]: I0121 19:05:44.150735 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-24c4t/crc-debug-pjwsf" Jan 21 19:05:44 crc kubenswrapper[4799]: I0121 19:05:44.296371 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m25n\" (UniqueName: \"kubernetes.io/projected/b8cabebb-a104-4b59-87a0-5b2f19eb160e-kube-api-access-8m25n\") pod \"crc-debug-pjwsf\" (UID: \"b8cabebb-a104-4b59-87a0-5b2f19eb160e\") " pod="openshift-must-gather-24c4t/crc-debug-pjwsf" Jan 21 19:05:44 crc kubenswrapper[4799]: I0121 19:05:44.296468 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b8cabebb-a104-4b59-87a0-5b2f19eb160e-host\") pod \"crc-debug-pjwsf\" (UID: \"b8cabebb-a104-4b59-87a0-5b2f19eb160e\") " pod="openshift-must-gather-24c4t/crc-debug-pjwsf" Jan 21 19:05:44 crc kubenswrapper[4799]: I0121 19:05:44.398205 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m25n\" (UniqueName: \"kubernetes.io/projected/b8cabebb-a104-4b59-87a0-5b2f19eb160e-kube-api-access-8m25n\") pod \"crc-debug-pjwsf\" (UID: \"b8cabebb-a104-4b59-87a0-5b2f19eb160e\") " pod="openshift-must-gather-24c4t/crc-debug-pjwsf" Jan 21 19:05:44 crc kubenswrapper[4799]: I0121 19:05:44.398321 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b8cabebb-a104-4b59-87a0-5b2f19eb160e-host\") pod \"crc-debug-pjwsf\" (UID: \"b8cabebb-a104-4b59-87a0-5b2f19eb160e\") " pod="openshift-must-gather-24c4t/crc-debug-pjwsf" Jan 21 19:05:44 crc kubenswrapper[4799]: I0121 19:05:44.398570 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b8cabebb-a104-4b59-87a0-5b2f19eb160e-host\") pod \"crc-debug-pjwsf\" (UID: \"b8cabebb-a104-4b59-87a0-5b2f19eb160e\") " pod="openshift-must-gather-24c4t/crc-debug-pjwsf" Jan 21 19:05:44 crc kubenswrapper[4799]: I0121 19:05:44.427553 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8m25n\" (UniqueName: \"kubernetes.io/projected/b8cabebb-a104-4b59-87a0-5b2f19eb160e-kube-api-access-8m25n\") pod \"crc-debug-pjwsf\" (UID: \"b8cabebb-a104-4b59-87a0-5b2f19eb160e\") " pod="openshift-must-gather-24c4t/crc-debug-pjwsf" Jan 21 19:05:44 crc kubenswrapper[4799]: I0121 19:05:44.473800 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-24c4t/crc-debug-pjwsf" Jan 21 19:05:44 crc kubenswrapper[4799]: I0121 19:05:44.594709 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-24c4t/crc-debug-pjwsf" event={"ID":"b8cabebb-a104-4b59-87a0-5b2f19eb160e","Type":"ContainerStarted","Data":"3d308831d2aefa6d716b3fd45039963168857d98b2fa4ead9b36d7708e862815"} Jan 21 19:05:47 crc kubenswrapper[4799]: I0121 19:05:47.205949 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:05:47 crc kubenswrapper[4799]: E0121 19:05:47.206762 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:05:57 crc kubenswrapper[4799]: I0121 19:05:57.729344 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-24c4t/crc-debug-pjwsf" event={"ID":"b8cabebb-a104-4b59-87a0-5b2f19eb160e","Type":"ContainerStarted","Data":"ef69ae2e51bd85439fdbe0d722116aa73b23c328021054f693d088db99dea163"} Jan 21 19:05:57 crc kubenswrapper[4799]: I0121 19:05:57.752436 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-24c4t/crc-debug-pjwsf" podStartSLOduration=1.2489004320000001 podStartE2EDuration="13.752412384s" podCreationTimestamp="2026-01-21 19:05:44 +0000 UTC" firstStartedPulling="2026-01-21 19:05:44.559444918 +0000 UTC m=+5571.185734951" lastFinishedPulling="2026-01-21 19:05:57.06295688 +0000 UTC m=+5583.689246903" observedRunningTime="2026-01-21 19:05:57.742671729 +0000 UTC m=+5584.368961762" watchObservedRunningTime="2026-01-21 19:05:57.752412384 +0000 UTC m=+5584.378702417" Jan 21 19:05:59 crc kubenswrapper[4799]: I0121 19:05:59.206021 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:05:59 crc kubenswrapper[4799]: E0121 19:05:59.206660 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:06:11 crc kubenswrapper[4799]: I0121 19:06:11.205907 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:06:11 crc kubenswrapper[4799]: E0121 19:06:11.206944 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:06:23 crc kubenswrapper[4799]: I0121 19:06:23.205644 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 
19:06:23 crc kubenswrapper[4799]: E0121 19:06:23.206577 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:06:35 crc kubenswrapper[4799]: I0121 19:06:35.206964 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794" Jan 21 19:06:36 crc kubenswrapper[4799]: I0121 19:06:36.143972 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"e12abc633fcfdeb9e13a1b3701fb517e8f640918231936c27b04228e02bff853"} Jan 21 19:06:41 crc kubenswrapper[4799]: I0121 19:06:41.202694 4799 generic.go:334] "Generic (PLEG): container finished" podID="b8cabebb-a104-4b59-87a0-5b2f19eb160e" containerID="ef69ae2e51bd85439fdbe0d722116aa73b23c328021054f693d088db99dea163" exitCode=0 Jan 21 19:06:41 crc kubenswrapper[4799]: I0121 19:06:41.202791 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-24c4t/crc-debug-pjwsf" event={"ID":"b8cabebb-a104-4b59-87a0-5b2f19eb160e","Type":"ContainerDied","Data":"ef69ae2e51bd85439fdbe0d722116aa73b23c328021054f693d088db99dea163"} Jan 21 19:06:42 crc kubenswrapper[4799]: I0121 19:06:42.348179 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-24c4t/crc-debug-pjwsf" Jan 21 19:06:42 crc kubenswrapper[4799]: I0121 19:06:42.386655 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-24c4t/crc-debug-pjwsf"] Jan 21 19:06:42 crc kubenswrapper[4799]: I0121 19:06:42.401340 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-24c4t/crc-debug-pjwsf"] Jan 21 19:06:42 crc kubenswrapper[4799]: I0121 19:06:42.464626 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8m25n\" (UniqueName: \"kubernetes.io/projected/b8cabebb-a104-4b59-87a0-5b2f19eb160e-kube-api-access-8m25n\") pod \"b8cabebb-a104-4b59-87a0-5b2f19eb160e\" (UID: \"b8cabebb-a104-4b59-87a0-5b2f19eb160e\") " Jan 21 19:06:42 crc kubenswrapper[4799]: I0121 19:06:42.464770 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b8cabebb-a104-4b59-87a0-5b2f19eb160e-host\") pod \"b8cabebb-a104-4b59-87a0-5b2f19eb160e\" (UID: \"b8cabebb-a104-4b59-87a0-5b2f19eb160e\") " Jan 21 19:06:42 crc kubenswrapper[4799]: I0121 19:06:42.465407 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b8cabebb-a104-4b59-87a0-5b2f19eb160e-host" (OuterVolumeSpecName: "host") pod "b8cabebb-a104-4b59-87a0-5b2f19eb160e" (UID: "b8cabebb-a104-4b59-87a0-5b2f19eb160e"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 19:06:42 crc kubenswrapper[4799]: I0121 19:06:42.484507 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8cabebb-a104-4b59-87a0-5b2f19eb160e-kube-api-access-8m25n" (OuterVolumeSpecName: "kube-api-access-8m25n") pod "b8cabebb-a104-4b59-87a0-5b2f19eb160e" (UID: "b8cabebb-a104-4b59-87a0-5b2f19eb160e"). InnerVolumeSpecName "kube-api-access-8m25n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:06:42 crc kubenswrapper[4799]: I0121 19:06:42.567659 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8m25n\" (UniqueName: \"kubernetes.io/projected/b8cabebb-a104-4b59-87a0-5b2f19eb160e-kube-api-access-8m25n\") on node \"crc\" DevicePath \"\"" Jan 21 19:06:42 crc kubenswrapper[4799]: I0121 19:06:42.567702 4799 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b8cabebb-a104-4b59-87a0-5b2f19eb160e-host\") on node \"crc\" DevicePath \"\"" Jan 21 19:06:43 crc kubenswrapper[4799]: I0121 19:06:43.226451 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d308831d2aefa6d716b3fd45039963168857d98b2fa4ead9b36d7708e862815" Jan 21 19:06:43 crc kubenswrapper[4799]: I0121 19:06:43.226594 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-24c4t/crc-debug-pjwsf" Jan 21 19:06:43 crc kubenswrapper[4799]: I0121 19:06:43.587472 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-24c4t/crc-debug-jwcth"] Jan 21 19:06:43 crc kubenswrapper[4799]: E0121 19:06:43.588182 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8cabebb-a104-4b59-87a0-5b2f19eb160e" containerName="container-00" Jan 21 19:06:43 crc kubenswrapper[4799]: I0121 19:06:43.588199 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8cabebb-a104-4b59-87a0-5b2f19eb160e" containerName="container-00" Jan 21 19:06:43 crc kubenswrapper[4799]: I0121 19:06:43.588432 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8cabebb-a104-4b59-87a0-5b2f19eb160e" containerName="container-00" Jan 21 19:06:43 crc kubenswrapper[4799]: I0121 19:06:43.589102 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-24c4t/crc-debug-jwcth" Jan 21 19:06:43 crc kubenswrapper[4799]: I0121 19:06:43.692300 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a64b41a1-4088-4a94-894d-488db03b8b7e-host\") pod \"crc-debug-jwcth\" (UID: \"a64b41a1-4088-4a94-894d-488db03b8b7e\") " pod="openshift-must-gather-24c4t/crc-debug-jwcth" Jan 21 19:06:43 crc kubenswrapper[4799]: I0121 19:06:43.692399 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bv5kw\" (UniqueName: \"kubernetes.io/projected/a64b41a1-4088-4a94-894d-488db03b8b7e-kube-api-access-bv5kw\") pod \"crc-debug-jwcth\" (UID: \"a64b41a1-4088-4a94-894d-488db03b8b7e\") " pod="openshift-must-gather-24c4t/crc-debug-jwcth" Jan 21 19:06:43 crc kubenswrapper[4799]: I0121 19:06:43.793983 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a64b41a1-4088-4a94-894d-488db03b8b7e-host\") pod \"crc-debug-jwcth\" (UID: \"a64b41a1-4088-4a94-894d-488db03b8b7e\") " pod="openshift-must-gather-24c4t/crc-debug-jwcth" Jan 21 19:06:43 crc kubenswrapper[4799]: I0121 19:06:43.794081 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bv5kw\" (UniqueName: \"kubernetes.io/projected/a64b41a1-4088-4a94-894d-488db03b8b7e-kube-api-access-bv5kw\") pod \"crc-debug-jwcth\" (UID: \"a64b41a1-4088-4a94-894d-488db03b8b7e\") " pod="openshift-must-gather-24c4t/crc-debug-jwcth" Jan 21 19:06:43 crc kubenswrapper[4799]: I0121 19:06:43.794203 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a64b41a1-4088-4a94-894d-488db03b8b7e-host\") pod \"crc-debug-jwcth\" (UID: \"a64b41a1-4088-4a94-894d-488db03b8b7e\") " pod="openshift-must-gather-24c4t/crc-debug-jwcth" Jan 21 19:06:43 crc kubenswrapper[4799]: I0121 19:06:43.813867 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bv5kw\" (UniqueName: \"kubernetes.io/projected/a64b41a1-4088-4a94-894d-488db03b8b7e-kube-api-access-bv5kw\") pod \"crc-debug-jwcth\" (UID: \"a64b41a1-4088-4a94-894d-488db03b8b7e\") " pod="openshift-must-gather-24c4t/crc-debug-jwcth" Jan 21 19:06:43 crc kubenswrapper[4799]: I0121 19:06:43.911471 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-24c4t/crc-debug-jwcth" Jan 21 19:06:44 crc kubenswrapper[4799]: I0121 19:06:44.218528 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8cabebb-a104-4b59-87a0-5b2f19eb160e" path="/var/lib/kubelet/pods/b8cabebb-a104-4b59-87a0-5b2f19eb160e/volumes" Jan 21 19:06:44 crc kubenswrapper[4799]: I0121 19:06:44.244030 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-24c4t/crc-debug-jwcth" event={"ID":"a64b41a1-4088-4a94-894d-488db03b8b7e","Type":"ContainerStarted","Data":"94e0267cbb5a278fede014cb1520320966225812ac126613f22569d6e9a86938"} Jan 21 19:06:45 crc kubenswrapper[4799]: I0121 19:06:45.255338 4799 generic.go:334] "Generic (PLEG): container finished" podID="a64b41a1-4088-4a94-894d-488db03b8b7e" containerID="1ef601f3be665c2c6958d796b8871c621dbd4688508a54828098c2ee08ff609a" exitCode=0 Jan 21 19:06:45 crc kubenswrapper[4799]: I0121 19:06:45.255460 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-24c4t/crc-debug-jwcth" event={"ID":"a64b41a1-4088-4a94-894d-488db03b8b7e","Type":"ContainerDied","Data":"1ef601f3be665c2c6958d796b8871c621dbd4688508a54828098c2ee08ff609a"} Jan 21 19:06:46 crc kubenswrapper[4799]: I0121 19:06:46.369602 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-24c4t/crc-debug-jwcth" Jan 21 19:06:46 crc kubenswrapper[4799]: I0121 19:06:46.552218 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a64b41a1-4088-4a94-894d-488db03b8b7e-host\") pod \"a64b41a1-4088-4a94-894d-488db03b8b7e\" (UID: \"a64b41a1-4088-4a94-894d-488db03b8b7e\") " Jan 21 19:06:46 crc kubenswrapper[4799]: I0121 19:06:46.552717 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bv5kw\" (UniqueName: \"kubernetes.io/projected/a64b41a1-4088-4a94-894d-488db03b8b7e-kube-api-access-bv5kw\") pod \"a64b41a1-4088-4a94-894d-488db03b8b7e\" (UID: \"a64b41a1-4088-4a94-894d-488db03b8b7e\") " Jan 21 19:06:46 crc kubenswrapper[4799]: I0121 19:06:46.552320 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a64b41a1-4088-4a94-894d-488db03b8b7e-host" (OuterVolumeSpecName: "host") pod "a64b41a1-4088-4a94-894d-488db03b8b7e" (UID: "a64b41a1-4088-4a94-894d-488db03b8b7e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 19:06:46 crc kubenswrapper[4799]: I0121 19:06:46.553691 4799 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a64b41a1-4088-4a94-894d-488db03b8b7e-host\") on node \"crc\" DevicePath \"\"" Jan 21 19:06:46 crc kubenswrapper[4799]: I0121 19:06:46.573027 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a64b41a1-4088-4a94-894d-488db03b8b7e-kube-api-access-bv5kw" (OuterVolumeSpecName: "kube-api-access-bv5kw") pod "a64b41a1-4088-4a94-894d-488db03b8b7e" (UID: "a64b41a1-4088-4a94-894d-488db03b8b7e"). InnerVolumeSpecName "kube-api-access-bv5kw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:06:46 crc kubenswrapper[4799]: I0121 19:06:46.656530 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bv5kw\" (UniqueName: \"kubernetes.io/projected/a64b41a1-4088-4a94-894d-488db03b8b7e-kube-api-access-bv5kw\") on node \"crc\" DevicePath \"\"" Jan 21 19:06:47 crc kubenswrapper[4799]: I0121 19:06:47.276041 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-24c4t/crc-debug-jwcth" event={"ID":"a64b41a1-4088-4a94-894d-488db03b8b7e","Type":"ContainerDied","Data":"94e0267cbb5a278fede014cb1520320966225812ac126613f22569d6e9a86938"} Jan 21 19:06:47 crc kubenswrapper[4799]: I0121 19:06:47.276357 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94e0267cbb5a278fede014cb1520320966225812ac126613f22569d6e9a86938" Jan 21 19:06:47 crc kubenswrapper[4799]: I0121 19:06:47.276246 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-24c4t/crc-debug-jwcth" Jan 21 19:06:47 crc kubenswrapper[4799]: I0121 19:06:47.771334 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-24c4t/crc-debug-jwcth"] Jan 21 19:06:47 crc kubenswrapper[4799]: I0121 19:06:47.780077 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-24c4t/crc-debug-jwcth"] Jan 21 19:06:48 crc kubenswrapper[4799]: I0121 19:06:48.235376 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a64b41a1-4088-4a94-894d-488db03b8b7e" path="/var/lib/kubelet/pods/a64b41a1-4088-4a94-894d-488db03b8b7e/volumes" Jan 21 19:06:48 crc kubenswrapper[4799]: I0121 19:06:48.970655 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-24c4t/crc-debug-mknpr"] Jan 21 19:06:48 crc kubenswrapper[4799]: E0121 19:06:48.971404 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a64b41a1-4088-4a94-894d-488db03b8b7e" containerName="container-00" Jan 21 19:06:48 crc kubenswrapper[4799]: I0121 19:06:48.971419 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="a64b41a1-4088-4a94-894d-488db03b8b7e" containerName="container-00" Jan 21 19:06:48 crc kubenswrapper[4799]: I0121 19:06:48.971626 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="a64b41a1-4088-4a94-894d-488db03b8b7e" containerName="container-00" Jan 21 19:06:48 crc kubenswrapper[4799]: I0121 19:06:48.972371 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-24c4t/crc-debug-mknpr" Jan 21 19:06:49 crc kubenswrapper[4799]: I0121 19:06:49.111288 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e5d17f14-0a7a-492b-a912-79b3491853ea-host\") pod \"crc-debug-mknpr\" (UID: \"e5d17f14-0a7a-492b-a912-79b3491853ea\") " pod="openshift-must-gather-24c4t/crc-debug-mknpr" Jan 21 19:06:49 crc kubenswrapper[4799]: I0121 19:06:49.111489 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgrh6\" (UniqueName: \"kubernetes.io/projected/e5d17f14-0a7a-492b-a912-79b3491853ea-kube-api-access-rgrh6\") pod \"crc-debug-mknpr\" (UID: \"e5d17f14-0a7a-492b-a912-79b3491853ea\") " pod="openshift-must-gather-24c4t/crc-debug-mknpr" Jan 21 19:06:49 crc kubenswrapper[4799]: I0121 19:06:49.214045 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e5d17f14-0a7a-492b-a912-79b3491853ea-host\") pod \"crc-debug-mknpr\" (UID: \"e5d17f14-0a7a-492b-a912-79b3491853ea\") " pod="openshift-must-gather-24c4t/crc-debug-mknpr" Jan 21 19:06:49 crc kubenswrapper[4799]: I0121 19:06:49.214154 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e5d17f14-0a7a-492b-a912-79b3491853ea-host\") pod \"crc-debug-mknpr\" (UID: \"e5d17f14-0a7a-492b-a912-79b3491853ea\") " pod="openshift-must-gather-24c4t/crc-debug-mknpr" Jan 21 19:06:49 crc kubenswrapper[4799]: I0121 19:06:49.214199 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgrh6\" (UniqueName: \"kubernetes.io/projected/e5d17f14-0a7a-492b-a912-79b3491853ea-kube-api-access-rgrh6\") pod \"crc-debug-mknpr\" (UID: \"e5d17f14-0a7a-492b-a912-79b3491853ea\") " pod="openshift-must-gather-24c4t/crc-debug-mknpr" Jan 21 19:06:49 crc kubenswrapper[4799]: I0121 19:06:49.242456 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgrh6\" (UniqueName: \"kubernetes.io/projected/e5d17f14-0a7a-492b-a912-79b3491853ea-kube-api-access-rgrh6\") pod \"crc-debug-mknpr\" (UID: \"e5d17f14-0a7a-492b-a912-79b3491853ea\") " pod="openshift-must-gather-24c4t/crc-debug-mknpr" Jan 21 19:06:49 crc kubenswrapper[4799]: I0121 19:06:49.295478 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-24c4t/crc-debug-mknpr" Jan 21 19:06:50 crc kubenswrapper[4799]: W0121 19:06:50.110285 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5d17f14_0a7a_492b_a912_79b3491853ea.slice/crio-d47a0b011bca51e2189e22c320c065df5b782e9b3a2daa565fd304d324076d4f WatchSource:0}: Error finding container d47a0b011bca51e2189e22c320c065df5b782e9b3a2daa565fd304d324076d4f: Status 404 returned error can't find the container with id d47a0b011bca51e2189e22c320c065df5b782e9b3a2daa565fd304d324076d4f Jan 21 19:06:50 crc kubenswrapper[4799]: I0121 19:06:50.311367 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-24c4t/crc-debug-mknpr" event={"ID":"e5d17f14-0a7a-492b-a912-79b3491853ea","Type":"ContainerStarted","Data":"d47a0b011bca51e2189e22c320c065df5b782e9b3a2daa565fd304d324076d4f"} Jan 21 19:06:51 crc kubenswrapper[4799]: I0121 19:06:51.343411 4799 generic.go:334] "Generic (PLEG): container finished" podID="e5d17f14-0a7a-492b-a912-79b3491853ea" containerID="06cefbf157b12d0930310749a3247b702c00469bc9d4357ddab62e24834aa1fb" exitCode=0 Jan 21 19:06:51 crc kubenswrapper[4799]: I0121 19:06:51.343488 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-24c4t/crc-debug-mknpr" event={"ID":"e5d17f14-0a7a-492b-a912-79b3491853ea","Type":"ContainerDied","Data":"06cefbf157b12d0930310749a3247b702c00469bc9d4357ddab62e24834aa1fb"} Jan 21 19:06:51 crc kubenswrapper[4799]: I0121 19:06:51.389384 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-24c4t/crc-debug-mknpr"] Jan 21 19:06:51 crc kubenswrapper[4799]: I0121 19:06:51.404533 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-24c4t/crc-debug-mknpr"] Jan 21 19:06:52 crc kubenswrapper[4799]: I0121 19:06:52.463560 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-24c4t/crc-debug-mknpr" Jan 21 19:06:52 crc kubenswrapper[4799]: I0121 19:06:52.597467 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgrh6\" (UniqueName: \"kubernetes.io/projected/e5d17f14-0a7a-492b-a912-79b3491853ea-kube-api-access-rgrh6\") pod \"e5d17f14-0a7a-492b-a912-79b3491853ea\" (UID: \"e5d17f14-0a7a-492b-a912-79b3491853ea\") " Jan 21 19:06:52 crc kubenswrapper[4799]: I0121 19:06:52.597713 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e5d17f14-0a7a-492b-a912-79b3491853ea-host\") pod \"e5d17f14-0a7a-492b-a912-79b3491853ea\" (UID: \"e5d17f14-0a7a-492b-a912-79b3491853ea\") " Jan 21 19:06:52 crc kubenswrapper[4799]: I0121 19:06:52.597926 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e5d17f14-0a7a-492b-a912-79b3491853ea-host" (OuterVolumeSpecName: "host") pod "e5d17f14-0a7a-492b-a912-79b3491853ea" (UID: "e5d17f14-0a7a-492b-a912-79b3491853ea"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 19:06:52 crc kubenswrapper[4799]: I0121 19:06:52.598353 4799 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e5d17f14-0a7a-492b-a912-79b3491853ea-host\") on node \"crc\" DevicePath \"\"" Jan 21 19:06:52 crc kubenswrapper[4799]: I0121 19:06:52.606448 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5d17f14-0a7a-492b-a912-79b3491853ea-kube-api-access-rgrh6" (OuterVolumeSpecName: "kube-api-access-rgrh6") pod "e5d17f14-0a7a-492b-a912-79b3491853ea" (UID: "e5d17f14-0a7a-492b-a912-79b3491853ea"). InnerVolumeSpecName "kube-api-access-rgrh6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:06:52 crc kubenswrapper[4799]: I0121 19:06:52.700416 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgrh6\" (UniqueName: \"kubernetes.io/projected/e5d17f14-0a7a-492b-a912-79b3491853ea-kube-api-access-rgrh6\") on node \"crc\" DevicePath \"\"" Jan 21 19:06:53 crc kubenswrapper[4799]: I0121 19:06:53.379538 4799 scope.go:117] "RemoveContainer" containerID="06cefbf157b12d0930310749a3247b702c00469bc9d4357ddab62e24834aa1fb" Jan 21 19:06:53 crc kubenswrapper[4799]: I0121 19:06:53.379567 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-24c4t/crc-debug-mknpr" Jan 21 19:06:54 crc kubenswrapper[4799]: I0121 19:06:54.225891 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5d17f14-0a7a-492b-a912-79b3491853ea" path="/var/lib/kubelet/pods/e5d17f14-0a7a-492b-a912-79b3491853ea/volumes" Jan 21 19:07:19 crc kubenswrapper[4799]: I0121 19:07:19.410066 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rsxwq"] Jan 21 19:07:19 crc kubenswrapper[4799]: E0121 19:07:19.411695 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5d17f14-0a7a-492b-a912-79b3491853ea" containerName="container-00" Jan 21 19:07:19 crc kubenswrapper[4799]: I0121 19:07:19.411721 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5d17f14-0a7a-492b-a912-79b3491853ea" containerName="container-00" Jan 21 19:07:19 crc kubenswrapper[4799]: I0121 19:07:19.412101 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5d17f14-0a7a-492b-a912-79b3491853ea" containerName="container-00" Jan 21 19:07:19 crc kubenswrapper[4799]: I0121 19:07:19.414567 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:19 crc kubenswrapper[4799]: I0121 19:07:19.423390 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rsxwq"] Jan 21 19:07:19 crc kubenswrapper[4799]: I0121 19:07:19.549069 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-catalog-content\") pod \"community-operators-rsxwq\" (UID: \"d9aaaf17-f698-44fc-b86e-99b2ebfcd235\") " pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:19 crc kubenswrapper[4799]: I0121 19:07:19.549159 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zc9t\" (UniqueName: \"kubernetes.io/projected/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-kube-api-access-4zc9t\") pod \"community-operators-rsxwq\" (UID: \"d9aaaf17-f698-44fc-b86e-99b2ebfcd235\") " pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:19 crc kubenswrapper[4799]: I0121 19:07:19.549421 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-utilities\") pod \"community-operators-rsxwq\" (UID: \"d9aaaf17-f698-44fc-b86e-99b2ebfcd235\") " pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:19 crc kubenswrapper[4799]: I0121 19:07:19.651280 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-utilities\") pod \"community-operators-rsxwq\" (UID: \"d9aaaf17-f698-44fc-b86e-99b2ebfcd235\") " pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:19 crc kubenswrapper[4799]: I0121 19:07:19.651388 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-catalog-content\") pod \"community-operators-rsxwq\" (UID: \"d9aaaf17-f698-44fc-b86e-99b2ebfcd235\") " pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:19 crc kubenswrapper[4799]: I0121 19:07:19.651422 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zc9t\" (UniqueName: \"kubernetes.io/projected/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-kube-api-access-4zc9t\") pod \"community-operators-rsxwq\" (UID: \"d9aaaf17-f698-44fc-b86e-99b2ebfcd235\") " pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:19 crc kubenswrapper[4799]: I0121 19:07:19.651914 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-utilities\") pod \"community-operators-rsxwq\" (UID: \"d9aaaf17-f698-44fc-b86e-99b2ebfcd235\") " pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:19 crc kubenswrapper[4799]: I0121 19:07:19.654015 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-catalog-content\") pod \"community-operators-rsxwq\" (UID: \"d9aaaf17-f698-44fc-b86e-99b2ebfcd235\") " pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:19 crc kubenswrapper[4799]: I0121 19:07:19.690406 4799 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4zc9t\" (UniqueName: \"kubernetes.io/projected/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-kube-api-access-4zc9t\") pod \"community-operators-rsxwq\" (UID: \"d9aaaf17-f698-44fc-b86e-99b2ebfcd235\") " pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:19 crc kubenswrapper[4799]: I0121 19:07:19.746743 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:20 crc kubenswrapper[4799]: I0121 19:07:20.349956 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rsxwq"] Jan 21 19:07:20 crc kubenswrapper[4799]: W0121 19:07:20.894406 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9aaaf17_f698_44fc_b86e_99b2ebfcd235.slice/crio-b268f6ffd0c803ab0e844c1ae5d2557c78e25fd1c4dc9a4cea63b5813faac430 WatchSource:0}: Error finding container b268f6ffd0c803ab0e844c1ae5d2557c78e25fd1c4dc9a4cea63b5813faac430: Status 404 returned error can't find the container with id b268f6ffd0c803ab0e844c1ae5d2557c78e25fd1c4dc9a4cea63b5813faac430 Jan 21 19:07:21 crc kubenswrapper[4799]: I0121 19:07:21.686739 4799 generic.go:334] "Generic (PLEG): container finished" podID="d9aaaf17-f698-44fc-b86e-99b2ebfcd235" containerID="0ebb9b498425e04f50013aa6fa81b0c1bc0b4c3db5da7e6117a8248fe2f1c38d" exitCode=0 Jan 21 19:07:21 crc kubenswrapper[4799]: I0121 19:07:21.687035 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rsxwq" event={"ID":"d9aaaf17-f698-44fc-b86e-99b2ebfcd235","Type":"ContainerDied","Data":"0ebb9b498425e04f50013aa6fa81b0c1bc0b4c3db5da7e6117a8248fe2f1c38d"} Jan 21 19:07:21 crc kubenswrapper[4799]: I0121 19:07:21.687063 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rsxwq" event={"ID":"d9aaaf17-f698-44fc-b86e-99b2ebfcd235","Type":"ContainerStarted","Data":"b268f6ffd0c803ab0e844c1ae5d2557c78e25fd1c4dc9a4cea63b5813faac430"} Jan 21 19:07:22 crc kubenswrapper[4799]: I0121 19:07:22.697217 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rsxwq" event={"ID":"d9aaaf17-f698-44fc-b86e-99b2ebfcd235","Type":"ContainerStarted","Data":"12e1c50e40436a7df2931c68f7ea6f1ed1a86112b6d46367b06cf9c9eee4eb7b"} Jan 21 19:07:24 crc kubenswrapper[4799]: I0121 19:07:24.721960 4799 generic.go:334] "Generic (PLEG): container finished" podID="d9aaaf17-f698-44fc-b86e-99b2ebfcd235" containerID="12e1c50e40436a7df2931c68f7ea6f1ed1a86112b6d46367b06cf9c9eee4eb7b" exitCode=0 Jan 21 19:07:24 crc kubenswrapper[4799]: I0121 19:07:24.722044 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rsxwq" event={"ID":"d9aaaf17-f698-44fc-b86e-99b2ebfcd235","Type":"ContainerDied","Data":"12e1c50e40436a7df2931c68f7ea6f1ed1a86112b6d46367b06cf9c9eee4eb7b"} Jan 21 19:07:25 crc kubenswrapper[4799]: I0121 19:07:25.734378 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rsxwq" event={"ID":"d9aaaf17-f698-44fc-b86e-99b2ebfcd235","Type":"ContainerStarted","Data":"282e2eb964f91ccaf336563c0236ccd2cd51f9265ef87611040e468c167ee087"} Jan 21 19:07:25 crc kubenswrapper[4799]: I0121 19:07:25.766094 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rsxwq" 
podStartSLOduration=3.240870043 podStartE2EDuration="6.766070666s" podCreationTimestamp="2026-01-21 19:07:19 +0000 UTC" firstStartedPulling="2026-01-21 19:07:21.689168896 +0000 UTC m=+5668.315458919" lastFinishedPulling="2026-01-21 19:07:25.214369509 +0000 UTC m=+5671.840659542" observedRunningTime="2026-01-21 19:07:25.757166754 +0000 UTC m=+5672.383456797" watchObservedRunningTime="2026-01-21 19:07:25.766070666 +0000 UTC m=+5672.392360689" Jan 21 19:07:27 crc kubenswrapper[4799]: I0121 19:07:27.814590 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-cfcccc69b-6zwk4_f4c9e3bf-79dd-49d5-af90-db5a6087f0f3/barbican-api/0.log" Jan 21 19:07:28 crc kubenswrapper[4799]: I0121 19:07:28.021243 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-cfcccc69b-6zwk4_f4c9e3bf-79dd-49d5-af90-db5a6087f0f3/barbican-api-log/0.log" Jan 21 19:07:28 crc kubenswrapper[4799]: I0121 19:07:28.033005 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6b9d59f6f8-vl6sn_9bb30a38-ea0d-4580-9a41-326f00b5c149/barbican-keystone-listener/0.log" Jan 21 19:07:28 crc kubenswrapper[4799]: I0121 19:07:28.136406 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6b9d59f6f8-vl6sn_9bb30a38-ea0d-4580-9a41-326f00b5c149/barbican-keystone-listener-log/0.log" Jan 21 19:07:28 crc kubenswrapper[4799]: I0121 19:07:28.262213 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7f979ff5f7-qvdts_c7dc5147-addd-46d9-b5b3-3f328c0a5a94/barbican-worker/0.log" Jan 21 19:07:28 crc kubenswrapper[4799]: I0121 19:07:28.307174 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7f979ff5f7-qvdts_c7dc5147-addd-46d9-b5b3-3f328c0a5a94/barbican-worker-log/0.log" Jan 21 19:07:28 crc kubenswrapper[4799]: I0121 19:07:28.587440 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp_7f2d9e34-479a-44ae-b64e-55baf5645dfc/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:07:28 crc kubenswrapper[4799]: I0121 19:07:28.604545 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_04163938-d340-4731-82c4-e01a636b7df2/ceilometer-central-agent/0.log" Jan 21 19:07:28 crc kubenswrapper[4799]: I0121 19:07:28.735359 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_04163938-d340-4731-82c4-e01a636b7df2/ceilometer-notification-agent/0.log" Jan 21 19:07:28 crc kubenswrapper[4799]: I0121 19:07:28.819738 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_04163938-d340-4731-82c4-e01a636b7df2/proxy-httpd/0.log" Jan 21 19:07:28 crc kubenswrapper[4799]: I0121 19:07:28.848878 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_04163938-d340-4731-82c4-e01a636b7df2/sg-core/0.log" Jan 21 19:07:29 crc kubenswrapper[4799]: I0121 19:07:29.131181 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e8bf54a9-bf9c-47d1-b391-b73c0055c935/cinder-api-log/0.log" Jan 21 19:07:29 crc kubenswrapper[4799]: I0121 19:07:29.485703 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_55c0bc68-cae8-4eee-9caf-37f8a26c76f9/probe/0.log" Jan 21 19:07:29 crc kubenswrapper[4799]: I0121 19:07:29.594884 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_cinder-backup-0_55c0bc68-cae8-4eee-9caf-37f8a26c76f9/cinder-backup/0.log" Jan 21 19:07:29 crc kubenswrapper[4799]: I0121 19:07:29.664264 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e8bf54a9-bf9c-47d1-b391-b73c0055c935/cinder-api/0.log" Jan 21 19:07:29 crc kubenswrapper[4799]: I0121 19:07:29.747548 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:29 crc kubenswrapper[4799]: I0121 19:07:29.747600 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:29 crc kubenswrapper[4799]: I0121 19:07:29.777800 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_eb283f72-f347-49c6-9813-6fece9fc70da/cinder-scheduler/0.log" Jan 21 19:07:29 crc kubenswrapper[4799]: I0121 19:07:29.814308 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:29 crc kubenswrapper[4799]: I0121 19:07:29.817219 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_eb283f72-f347-49c6-9813-6fece9fc70da/probe/0.log" Jan 21 19:07:30 crc kubenswrapper[4799]: I0121 19:07:30.042927 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-0_0658bd3c-d1f4-486d-957a-38f4eb9ccc10/probe/0.log" Jan 21 19:07:30 crc kubenswrapper[4799]: I0121 19:07:30.090463 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-0_0658bd3c-d1f4-486d-957a-38f4eb9ccc10/cinder-volume/0.log" Jan 21 19:07:30 crc kubenswrapper[4799]: I0121 19:07:30.436900 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-2-0_82503b8b-9773-4e14-9703-663675725aa9/probe/0.log" Jan 21 19:07:30 crc kubenswrapper[4799]: I0121 19:07:30.545057 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6_8cea4a3f-5c0a-4e15-a62d-64798a8f818e/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:07:30 crc kubenswrapper[4799]: I0121 19:07:30.555667 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-2-0_82503b8b-9773-4e14-9703-663675725aa9/cinder-volume/0.log" Jan 21 19:07:30 crc kubenswrapper[4799]: I0121 19:07:30.771532 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p_d427281b-c110-468f-b056-78a91049bcd4/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:07:30 crc kubenswrapper[4799]: I0121 19:07:30.807835 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-59899cb9c-whmhs_41441182-ee7b-46da-9f86-975ad9b22777/init/0.log" Jan 21 19:07:30 crc kubenswrapper[4799]: I0121 19:07:30.846206 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:30 crc kubenswrapper[4799]: I0121 19:07:30.901395 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rsxwq"] Jan 21 19:07:31 crc kubenswrapper[4799]: I0121 19:07:31.047845 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-59899cb9c-whmhs_41441182-ee7b-46da-9f86-975ad9b22777/init/0.log" Jan 21 19:07:31 crc 
kubenswrapper[4799]: I0121 19:07:31.128058 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d_0bf0a460-fd95-41ad-b7a3-8f3fb4500db7/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:07:31 crc kubenswrapper[4799]: I0121 19:07:31.290018 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-59899cb9c-whmhs_41441182-ee7b-46da-9f86-975ad9b22777/dnsmasq-dns/0.log" Jan 21 19:07:31 crc kubenswrapper[4799]: I0121 19:07:31.389888 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_486557f2-139f-4297-8c6c-9dc9ed6f5cdc/glance-log/0.log" Jan 21 19:07:31 crc kubenswrapper[4799]: I0121 19:07:31.400994 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_486557f2-139f-4297-8c6c-9dc9ed6f5cdc/glance-httpd/0.log" Jan 21 19:07:31 crc kubenswrapper[4799]: I0121 19:07:31.606415 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_5152eb20-55f2-4c0c-9a8b-6b1e9043abf9/glance-httpd/0.log" Jan 21 19:07:31 crc kubenswrapper[4799]: I0121 19:07:31.678881 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_5152eb20-55f2-4c0c-9a8b-6b1e9043abf9/glance-log/0.log" Jan 21 19:07:31 crc kubenswrapper[4799]: I0121 19:07:31.871730 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-585ff694b6-5fph4_b8391139-71cc-48bb-af31-242cebaea8de/horizon/0.log" Jan 21 19:07:31 crc kubenswrapper[4799]: I0121 19:07:31.948788 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-phxp5_7e239b6d-2469-4d29-b1e1-72b1d7916ada/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:07:32 crc kubenswrapper[4799]: I0121 19:07:32.318542 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-qknml_0d9cee91-78fe-4816-a3dc-db90e98bcddd/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:07:32 crc kubenswrapper[4799]: I0121 19:07:32.562166 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29483641-hhgjf_e4a59182-8711-4086-b753-a527b88f464b/keystone-cron/0.log" Jan 21 19:07:32 crc kubenswrapper[4799]: I0121 19:07:32.643342 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-585ff694b6-5fph4_b8391139-71cc-48bb-af31-242cebaea8de/horizon-log/0.log" Jan 21 19:07:32 crc kubenswrapper[4799]: I0121 19:07:32.723278 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29483701-6vvv2_7295fe64-03cb-4766-934e-cbd3eace9c00/keystone-cron/0.log" Jan 21 19:07:32 crc kubenswrapper[4799]: I0121 19:07:32.806092 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rsxwq" podUID="d9aaaf17-f698-44fc-b86e-99b2ebfcd235" containerName="registry-server" containerID="cri-o://282e2eb964f91ccaf336563c0236ccd2cd51f9265ef87611040e468c167ee087" gracePeriod=2 Jan 21 19:07:32 crc kubenswrapper[4799]: I0121 19:07:32.874627 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-86999674c5-gpgq6_4cc3ff02-feee-4b55-a057-99380b99a10e/keystone-api/0.log" Jan 21 19:07:32 crc kubenswrapper[4799]: I0121 19:07:32.933535 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_kube-state-metrics-0_dab8cf8d-7956-4cfb-a107-0e15661fc5f7/kube-state-metrics/0.log" Jan 21 19:07:33 crc kubenswrapper[4799]: I0121 19:07:33.047476 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx_34ff08b0-f833-4c42-90a7-68af92ba7ce8/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:07:33 crc kubenswrapper[4799]: I0121 19:07:33.828242 4799 generic.go:334] "Generic (PLEG): container finished" podID="d9aaaf17-f698-44fc-b86e-99b2ebfcd235" containerID="282e2eb964f91ccaf336563c0236ccd2cd51f9265ef87611040e468c167ee087" exitCode=0 Jan 21 19:07:33 crc kubenswrapper[4799]: I0121 19:07:33.828589 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rsxwq" event={"ID":"d9aaaf17-f698-44fc-b86e-99b2ebfcd235","Type":"ContainerDied","Data":"282e2eb964f91ccaf336563c0236ccd2cd51f9265ef87611040e468c167ee087"} Jan 21 19:07:33 crc kubenswrapper[4799]: I0121 19:07:33.990614 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.089688 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-catalog-content\") pod \"d9aaaf17-f698-44fc-b86e-99b2ebfcd235\" (UID: \"d9aaaf17-f698-44fc-b86e-99b2ebfcd235\") " Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.089973 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zc9t\" (UniqueName: \"kubernetes.io/projected/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-kube-api-access-4zc9t\") pod \"d9aaaf17-f698-44fc-b86e-99b2ebfcd235\" (UID: \"d9aaaf17-f698-44fc-b86e-99b2ebfcd235\") " Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.090005 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-utilities\") pod \"d9aaaf17-f698-44fc-b86e-99b2ebfcd235\" (UID: \"d9aaaf17-f698-44fc-b86e-99b2ebfcd235\") " Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.091511 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-utilities" (OuterVolumeSpecName: "utilities") pod "d9aaaf17-f698-44fc-b86e-99b2ebfcd235" (UID: "d9aaaf17-f698-44fc-b86e-99b2ebfcd235"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.109395 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-kube-api-access-4zc9t" (OuterVolumeSpecName: "kube-api-access-4zc9t") pod "d9aaaf17-f698-44fc-b86e-99b2ebfcd235" (UID: "d9aaaf17-f698-44fc-b86e-99b2ebfcd235"). InnerVolumeSpecName "kube-api-access-4zc9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.173671 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d9aaaf17-f698-44fc-b86e-99b2ebfcd235" (UID: "d9aaaf17-f698-44fc-b86e-99b2ebfcd235"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.193981 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zc9t\" (UniqueName: \"kubernetes.io/projected/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-kube-api-access-4zc9t\") on node \"crc\" DevicePath \"\"" Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.194022 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.194035 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9aaaf17-f698-44fc-b86e-99b2ebfcd235-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.240322 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7644966657-gcssj_95553e27-38f3-4a0d-a382-d87410ca7ec3/neutron-httpd/0.log" Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.245853 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s_f5e75302-b14f-4281-93e3-a40bff4bcafa/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.339591 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7644966657-gcssj_95553e27-38f3-4a0d-a382-d87410ca7ec3/neutron-api/0.log" Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.900226 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rsxwq" event={"ID":"d9aaaf17-f698-44fc-b86e-99b2ebfcd235","Type":"ContainerDied","Data":"b268f6ffd0c803ab0e844c1ae5d2557c78e25fd1c4dc9a4cea63b5813faac430"} Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.900287 4799 scope.go:117] "RemoveContainer" containerID="282e2eb964f91ccaf336563c0236ccd2cd51f9265ef87611040e468c167ee087" Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.900492 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rsxwq" Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.942934 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rsxwq"] Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.950603 4799 scope.go:117] "RemoveContainer" containerID="12e1c50e40436a7df2931c68f7ea6f1ed1a86112b6d46367b06cf9c9eee4eb7b" Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.954292 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rsxwq"] Jan 21 19:07:34 crc kubenswrapper[4799]: I0121 19:07:34.986995 4799 scope.go:117] "RemoveContainer" containerID="0ebb9b498425e04f50013aa6fa81b0c1bc0b4c3db5da7e6117a8248fe2f1c38d" Jan 21 19:07:35 crc kubenswrapper[4799]: I0121 19:07:35.146531 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_d21651e9-1ffb-472f-8c41-652621413b50/nova-cell0-conductor-conductor/0.log" Jan 21 19:07:35 crc kubenswrapper[4799]: I0121 19:07:35.547938 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_ff6ad04e-2495-4af0-b908-ee65bb277ebc/nova-cell1-conductor-conductor/0.log" Jan 21 19:07:35 crc kubenswrapper[4799]: I0121 19:07:35.778468 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9/nova-cell1-novncproxy-novncproxy/0.log" Jan 21 19:07:36 crc kubenswrapper[4799]: I0121 19:07:36.048450 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_d954dc98-6a6a-49b8-976c-b668619adcff/nova-api-log/0.log" Jan 21 19:07:36 crc kubenswrapper[4799]: I0121 19:07:36.222453 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9aaaf17-f698-44fc-b86e-99b2ebfcd235" path="/var/lib/kubelet/pods/d9aaaf17-f698-44fc-b86e-99b2ebfcd235/volumes" Jan 21 19:07:36 crc kubenswrapper[4799]: I0121 19:07:36.517041 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-bs6xc_9f3a320d-5663-4a3d-adb0-fa58906a7a91/nova-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:07:36 crc kubenswrapper[4799]: I0121 19:07:36.556764 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_d954dc98-6a6a-49b8-976c-b668619adcff/nova-api-api/0.log" Jan 21 19:07:36 crc kubenswrapper[4799]: I0121 19:07:36.721371 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd/nova-metadata-log/0.log" Jan 21 19:07:36 crc kubenswrapper[4799]: I0121 19:07:36.991793 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e95ea0b2-ade1-4aaa-ad67-b85ebde84afa/mysql-bootstrap/0.log" Jan 21 19:07:37 crc kubenswrapper[4799]: I0121 19:07:37.197008 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_113d1aee-0a9f-47dd-9a33-ab951cab8535/nova-scheduler-scheduler/0.log" Jan 21 19:07:37 crc kubenswrapper[4799]: I0121 19:07:37.235731 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e95ea0b2-ade1-4aaa-ad67-b85ebde84afa/mysql-bootstrap/0.log" Jan 21 19:07:37 crc kubenswrapper[4799]: I0121 19:07:37.251397 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e95ea0b2-ade1-4aaa-ad67-b85ebde84afa/galera/0.log" Jan 21 19:07:37 crc 
kubenswrapper[4799]: I0121 19:07:37.503091 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_04f9c729-36bb-4aa5-9060-af5b0666b196/mysql-bootstrap/0.log" Jan 21 19:07:37 crc kubenswrapper[4799]: I0121 19:07:37.744629 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_04f9c729-36bb-4aa5-9060-af5b0666b196/galera/0.log" Jan 21 19:07:37 crc kubenswrapper[4799]: I0121 19:07:37.782840 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_04f9c729-36bb-4aa5-9060-af5b0666b196/mysql-bootstrap/0.log" Jan 21 19:07:37 crc kubenswrapper[4799]: I0121 19:07:37.986269 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_ff8dc291-a487-43d0-a494-9496737703ef/openstackclient/0.log" Jan 21 19:07:38 crc kubenswrapper[4799]: I0121 19:07:38.032411 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-68wt5_05213e52-1f99-42a4-b882-4514760063c7/ovn-controller/0.log" Jan 21 19:07:38 crc kubenswrapper[4799]: I0121 19:07:38.282745 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-bxtjr_b117b068-807b-4c10-8c30-46648892f87f/openstack-network-exporter/0.log" Jan 21 19:07:38 crc kubenswrapper[4799]: I0121 19:07:38.480426 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5dwpd_0d199dae-6bd1-48c4-8a95-25ffd4555e29/ovsdb-server-init/0.log" Jan 21 19:07:38 crc kubenswrapper[4799]: I0121 19:07:38.742963 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5dwpd_0d199dae-6bd1-48c4-8a95-25ffd4555e29/ovsdb-server/0.log" Jan 21 19:07:38 crc kubenswrapper[4799]: I0121 19:07:38.749527 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5dwpd_0d199dae-6bd1-48c4-8a95-25ffd4555e29/ovsdb-server-init/0.log" Jan 21 19:07:39 crc kubenswrapper[4799]: I0121 19:07:39.065952 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-mrgkt_a1a6a3df-3a95-4614-92f5-25fd585431b5/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:07:39 crc kubenswrapper[4799]: I0121 19:07:39.249210 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5dwpd_0d199dae-6bd1-48c4-8a95-25ffd4555e29/ovs-vswitchd/0.log" Jan 21 19:07:39 crc kubenswrapper[4799]: I0121 19:07:39.335495 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e983d617-7cd1-416a-8955-c3d755e4a5b0/openstack-network-exporter/0.log" Jan 21 19:07:39 crc kubenswrapper[4799]: I0121 19:07:39.377385 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd/nova-metadata-metadata/0.log" Jan 21 19:07:39 crc kubenswrapper[4799]: I0121 19:07:39.464839 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e983d617-7cd1-416a-8955-c3d755e4a5b0/ovn-northd/0.log" Jan 21 19:07:39 crc kubenswrapper[4799]: I0121 19:07:39.598286 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_58187703-2c52-4f99-8d9a-65306c90c5ed/openstack-network-exporter/0.log" Jan 21 19:07:39 crc kubenswrapper[4799]: I0121 19:07:39.627108 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_58187703-2c52-4f99-8d9a-65306c90c5ed/ovsdbserver-nb/0.log" Jan 21 19:07:39 crc 
kubenswrapper[4799]: I0121 19:07:39.771502 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_acea8227-6d95-4c5f-bba7-8e954701de28/openstack-network-exporter/0.log" Jan 21 19:07:39 crc kubenswrapper[4799]: I0121 19:07:39.832778 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_acea8227-6d95-4c5f-bba7-8e954701de28/ovsdbserver-sb/0.log" Jan 21 19:07:40 crc kubenswrapper[4799]: I0121 19:07:40.180092 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5e364896-23a0-4e1e-9e15-7f637ee5326c/init-config-reloader/0.log" Jan 21 19:07:40 crc kubenswrapper[4799]: I0121 19:07:40.257902 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-55f8df6d54-cffcw_d03c23b6-50c8-4a4b-b2ea-53c4a3010790/placement-api/0.log" Jan 21 19:07:40 crc kubenswrapper[4799]: I0121 19:07:40.329566 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-55f8df6d54-cffcw_d03c23b6-50c8-4a4b-b2ea-53c4a3010790/placement-log/0.log" Jan 21 19:07:40 crc kubenswrapper[4799]: I0121 19:07:40.412027 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5e364896-23a0-4e1e-9e15-7f637ee5326c/init-config-reloader/0.log" Jan 21 19:07:40 crc kubenswrapper[4799]: I0121 19:07:40.412437 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5e364896-23a0-4e1e-9e15-7f637ee5326c/config-reloader/0.log" Jan 21 19:07:40 crc kubenswrapper[4799]: I0121 19:07:40.510285 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5e364896-23a0-4e1e-9e15-7f637ee5326c/prometheus/0.log" Jan 21 19:07:40 crc kubenswrapper[4799]: I0121 19:07:40.557906 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5e364896-23a0-4e1e-9e15-7f637ee5326c/thanos-sidecar/0.log" Jan 21 19:07:40 crc kubenswrapper[4799]: I0121 19:07:40.671996 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_88d1e166-bb2f-473e-a955-e79c6251a580/setup-container/0.log" Jan 21 19:07:40 crc kubenswrapper[4799]: I0121 19:07:40.875359 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_88d1e166-bb2f-473e-a955-e79c6251a580/setup-container/0.log" Jan 21 19:07:40 crc kubenswrapper[4799]: I0121 19:07:40.928793 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_88d1e166-bb2f-473e-a955-e79c6251a580/rabbitmq/0.log" Jan 21 19:07:40 crc kubenswrapper[4799]: I0121 19:07:40.994550 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_63677f61-4283-417a-bcf7-303840452589/setup-container/0.log" Jan 21 19:07:41 crc kubenswrapper[4799]: I0121 19:07:41.217897 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_63677f61-4283-417a-bcf7-303840452589/setup-container/0.log" Jan 21 19:07:41 crc kubenswrapper[4799]: I0121 19:07:41.276529 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_135b6a22-006b-4270-a559-39fc323570b2/setup-container/0.log" Jan 21 19:07:41 crc kubenswrapper[4799]: I0121 19:07:41.291883 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_63677f61-4283-417a-bcf7-303840452589/rabbitmq/0.log" Jan 21 
19:07:41 crc kubenswrapper[4799]: I0121 19:07:41.529173 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_135b6a22-006b-4270-a559-39fc323570b2/setup-container/0.log" Jan 21 19:07:41 crc kubenswrapper[4799]: I0121 19:07:41.550564 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf_947392cf-f31e-4cc3-85b9-3fcf86b289ef/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:07:41 crc kubenswrapper[4799]: I0121 19:07:41.553260 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_135b6a22-006b-4270-a559-39fc323570b2/rabbitmq/0.log" Jan 21 19:07:41 crc kubenswrapper[4799]: I0121 19:07:41.844500 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-f8tvb_7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:07:41 crc kubenswrapper[4799]: I0121 19:07:41.863414 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd_509437ec-6d22-4843-accb-db316692f6c9/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:07:42 crc kubenswrapper[4799]: I0121 19:07:42.051439 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-2sm5r_1ea44035-c1c7-45f2-921a-bf2d91a9a7d8/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:07:42 crc kubenswrapper[4799]: I0121 19:07:42.263457 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-d92xl_a94b72a4-75d5-427c-86ab-014f1f9ee0a2/ssh-known-hosts-edpm-deployment/0.log" Jan 21 19:07:42 crc kubenswrapper[4799]: I0121 19:07:42.465180 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-749b6794b5-k8rw7_f7542699-9beb-4966-b1e4-b3c3cb9b42ff/proxy-server/0.log" Jan 21 19:07:42 crc kubenswrapper[4799]: I0121 19:07:42.624512 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-72p7x_a9b76963-d66a-43b7-9f1a-ef2a18ef6d02/swift-ring-rebalance/0.log" Jan 21 19:07:42 crc kubenswrapper[4799]: I0121 19:07:42.670423 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-749b6794b5-k8rw7_f7542699-9beb-4966-b1e4-b3c3cb9b42ff/proxy-httpd/0.log" Jan 21 19:07:42 crc kubenswrapper[4799]: I0121 19:07:42.785482 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/account-auditor/0.log" Jan 21 19:07:42 crc kubenswrapper[4799]: I0121 19:07:42.861625 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/account-reaper/0.log" Jan 21 19:07:42 crc kubenswrapper[4799]: I0121 19:07:42.929858 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/account-replicator/0.log" Jan 21 19:07:43 crc kubenswrapper[4799]: I0121 19:07:43.341553 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/container-auditor/0.log" Jan 21 19:07:43 crc kubenswrapper[4799]: I0121 19:07:43.403276 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/account-server/0.log" Jan 21 19:07:43 crc 
kubenswrapper[4799]: I0121 19:07:43.471178 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/container-server/0.log" Jan 21 19:07:43 crc kubenswrapper[4799]: I0121 19:07:43.566790 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/container-replicator/0.log" Jan 21 19:07:43 crc kubenswrapper[4799]: I0121 19:07:43.774893 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/object-auditor/0.log" Jan 21 19:07:43 crc kubenswrapper[4799]: I0121 19:07:43.850988 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/container-updater/0.log" Jan 21 19:07:43 crc kubenswrapper[4799]: I0121 19:07:43.866419 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/object-expirer/0.log" Jan 21 19:07:43 crc kubenswrapper[4799]: I0121 19:07:43.934451 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/object-replicator/0.log" Jan 21 19:07:44 crc kubenswrapper[4799]: I0121 19:07:44.082146 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/object-server/0.log" Jan 21 19:07:44 crc kubenswrapper[4799]: I0121 19:07:44.126399 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/object-updater/0.log" Jan 21 19:07:44 crc kubenswrapper[4799]: I0121 19:07:44.153102 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/swift-recon-cron/0.log" Jan 21 19:07:44 crc kubenswrapper[4799]: I0121 19:07:44.176920 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/rsync/0.log" Jan 21 19:07:44 crc kubenswrapper[4799]: I0121 19:07:44.462004 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_384bc0b0-0caa-45e3-b892-155def4ed881/tempest-tests-tempest-tests-runner/0.log" Jan 21 19:07:44 crc kubenswrapper[4799]: I0121 19:07:44.505043 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn_b5f5c54c-325e-4640-8cb5-5f8ac5c91234/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:07:44 crc kubenswrapper[4799]: I0121 19:07:44.769230 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_254949e9-614e-419a-ba47-42bf8850d001/test-operator-logs-container/0.log" Jan 21 19:07:44 crc kubenswrapper[4799]: I0121 19:07:44.862860 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-cxws2_b2bf4e9d-98ab-403e-8275-ac50c1b2c108/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:07:46 crc kubenswrapper[4799]: I0121 19:07:46.247294 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-applier-0_c37f7c3a-832c-4991-9fe0-6e923befb599/watcher-applier/0.log" Jan 21 19:07:46 crc kubenswrapper[4799]: I0121 19:07:46.267981 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_memcached-0_c75e85a7-0869-4fe4-ba35-d51f6107027c/memcached/0.log" Jan 21 19:07:47 crc kubenswrapper[4799]: I0121 19:07:47.075961 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_40368171-ea6d-4ab3-a1de-33204529aab4/watcher-api-log/0.log" Jan 21 19:07:49 crc kubenswrapper[4799]: I0121 19:07:49.455815 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_30f6abc1-fcdc-4901-9e88-3b6c5fd2a223/watcher-decision-engine/0.log" Jan 21 19:07:50 crc kubenswrapper[4799]: I0121 19:07:50.169279 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_40368171-ea6d-4ab3-a1de-33204529aab4/watcher-api/0.log" Jan 21 19:08:19 crc kubenswrapper[4799]: I0121 19:08:19.520032 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns_ba033264-742d-42f8-b688-5d0f8a853360/util/0.log" Jan 21 19:08:19 crc kubenswrapper[4799]: I0121 19:08:19.665707 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns_ba033264-742d-42f8-b688-5d0f8a853360/util/0.log" Jan 21 19:08:20 crc kubenswrapper[4799]: I0121 19:08:20.224926 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns_ba033264-742d-42f8-b688-5d0f8a853360/pull/0.log" Jan 21 19:08:20 crc kubenswrapper[4799]: I0121 19:08:20.297268 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns_ba033264-742d-42f8-b688-5d0f8a853360/pull/0.log" Jan 21 19:08:20 crc kubenswrapper[4799]: I0121 19:08:20.476893 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns_ba033264-742d-42f8-b688-5d0f8a853360/util/0.log" Jan 21 19:08:20 crc kubenswrapper[4799]: I0121 19:08:20.522400 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns_ba033264-742d-42f8-b688-5d0f8a853360/pull/0.log" Jan 21 19:08:20 crc kubenswrapper[4799]: I0121 19:08:20.528192 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns_ba033264-742d-42f8-b688-5d0f8a853360/extract/0.log" Jan 21 19:08:20 crc kubenswrapper[4799]: I0121 19:08:20.717585 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7ddb5c749-xgqnz_99a0338e-5d7f-47cd-a30f-8c57ab921724/manager/0.log" Jan 21 19:08:20 crc kubenswrapper[4799]: I0121 19:08:20.840387 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-9b68f5989-6gbp4_aa887ea8-0375-49c1-b802-9b3c8468fa87/manager/0.log" Jan 21 19:08:20 crc kubenswrapper[4799]: I0121 19:08:20.928870 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-9f958b845-2qd6s_0e8e19fd-c988-48ce-9150-1b46974bd86e/manager/0.log" Jan 21 19:08:21 crc kubenswrapper[4799]: I0121 19:08:21.115312 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_glance-operator-controller-manager-c6994669c-msz6d_10ffe97a-fa49-481f-9e79-55627ab24692/manager/0.log" Jan 21 19:08:21 crc kubenswrapper[4799]: I0121 19:08:21.154284 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-594c8c9d5d-tslfv_70fddebf-b616-47bd-a139-d2a4999624dd/manager/0.log" Jan 21 19:08:21 crc kubenswrapper[4799]: I0121 19:08:21.305454 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-ff7f6_ac9f205a-3d30-4ca3-b253-32c441466211/manager/0.log" Jan 21 19:08:21 crc kubenswrapper[4799]: I0121 19:08:21.565169 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-78757b4889-75r9k_3a0e1cc6-500f-4493-8a18-0eeea206a4f7/manager/0.log" Jan 21 19:08:21 crc kubenswrapper[4799]: I0121 19:08:21.740929 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-77c48c7859-ffgnr_7654ac1c-746c-46e6-b276-e9f6a839a187/manager/0.log" Jan 21 19:08:21 crc kubenswrapper[4799]: I0121 19:08:21.896772 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-767fdc4f47-rw2zn_223724ab-b9ee-4f55-b1ab-bf730a6314f9/manager/0.log" Jan 21 19:08:21 crc kubenswrapper[4799]: I0121 19:08:21.914701 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-864f6b75bf-4ttvx_f90c4327-642d-4efd-90d3-7d3b83dbcfc9/manager/0.log" Jan 21 19:08:22 crc kubenswrapper[4799]: I0121 19:08:22.147617 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-c87fff755-bz4tf_b35d565f-4d9f-437a-add9-8ef40d891e99/manager/0.log" Jan 21 19:08:22 crc kubenswrapper[4799]: I0121 19:08:22.254634 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-cb4666565-rwglk_ed464d3c-bdd7-4b19-a332-402ddeccb65b/manager/0.log" Jan 21 19:08:22 crc kubenswrapper[4799]: I0121 19:08:22.432372 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-65849867d6-v552f_11e4c63f-cdc3-4d50-a4e7-03386747ca86/manager/0.log" Jan 21 19:08:22 crc kubenswrapper[4799]: I0121 19:08:22.502270 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7fc9b76cf6-9wst2_b3ba3b28-4c9a-48f6-a914-5a125e4ef7f1/manager/0.log" Jan 21 19:08:22 crc kubenswrapper[4799]: I0121 19:08:22.621106 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6b68b8b854nht6t_4d7fd266-ebc9-46f2-9355-4dac2699822c/manager/0.log" Jan 21 19:08:22 crc kubenswrapper[4799]: I0121 19:08:22.821192 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-6664d49b67-ncnqt_35ba5cb8-8f17-4e8d-bd30-28912c4fbe0d/operator/0.log" Jan 21 19:08:23 crc kubenswrapper[4799]: I0121 19:08:23.039908 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-kwk6b_4ef61afc-f214-4ffd-875e-b8c8dfb2426e/registry-server/0.log" Jan 21 19:08:23 crc kubenswrapper[4799]: I0121 19:08:23.489357 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-55db956ddc-7nxnh_92a8c35c-6ef9-4453-9233-df8579764cd2/manager/0.log" Jan 21 19:08:23 crc kubenswrapper[4799]: I0121 19:08:23.497018 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-686df47fcb-2b24b_e898f43a-2487-48f8-9615-f02fdbd9eb30/manager/0.log" Jan 21 19:08:23 crc kubenswrapper[4799]: I0121 19:08:23.679446 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-hnnwf_3e5a60e4-5801-4273-a08a-20907c8bed09/operator/0.log" Jan 21 19:08:23 crc kubenswrapper[4799]: I0121 19:08:23.866063 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-85dd56d4cc-dqwbg_d9e14e78-dbbf-4941-9abb-5e2f8eb1fc2f/manager/0.log" Jan 21 19:08:24 crc kubenswrapper[4799]: I0121 19:08:24.316091 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5f8f495fcf-n2lq2_a2536e9b-8292-474d-ae06-00e4721120b3/manager/0.log" Jan 21 19:08:24 crc kubenswrapper[4799]: I0121 19:08:24.346728 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7ffc46955b-5t4q2_f0bcc23c-7399-4a1f-a91b-f643eaee6e60/manager/0.log" Jan 21 19:08:24 crc kubenswrapper[4799]: I0121 19:08:24.508747 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-7cd8bc9dbb-v4wk5_44bcdd0c-5b20-4387-a105-c8f3fb661a6f/manager/0.log" Jan 21 19:08:24 crc kubenswrapper[4799]: I0121 19:08:24.730810 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-65f5896948-jrzsz_822a89c1-0086-49f1-9bee-6ac87a2af52a/manager/0.log" Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.389153 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7r7ss"] Jan 21 19:08:45 crc kubenswrapper[4799]: E0121 19:08:45.390151 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9aaaf17-f698-44fc-b86e-99b2ebfcd235" containerName="extract-content" Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.390166 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9aaaf17-f698-44fc-b86e-99b2ebfcd235" containerName="extract-content" Jan 21 19:08:45 crc kubenswrapper[4799]: E0121 19:08:45.390197 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9aaaf17-f698-44fc-b86e-99b2ebfcd235" containerName="registry-server" Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.390203 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9aaaf17-f698-44fc-b86e-99b2ebfcd235" containerName="registry-server" Jan 21 19:08:45 crc kubenswrapper[4799]: E0121 19:08:45.390215 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9aaaf17-f698-44fc-b86e-99b2ebfcd235" containerName="extract-utilities" Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.390222 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9aaaf17-f698-44fc-b86e-99b2ebfcd235" containerName="extract-utilities" Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.390438 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9aaaf17-f698-44fc-b86e-99b2ebfcd235" containerName="registry-server" Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.391966 4799 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7r7ss" Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.401883 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7r7ss"] Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.483674 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bn49n\" (UniqueName: \"kubernetes.io/projected/87d5cfd3-7d03-4ff2-9db4-5228fca84f1c-kube-api-access-bn49n\") pod \"certified-operators-7r7ss\" (UID: \"87d5cfd3-7d03-4ff2-9db4-5228fca84f1c\") " pod="openshift-marketplace/certified-operators-7r7ss" Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.483759 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87d5cfd3-7d03-4ff2-9db4-5228fca84f1c-utilities\") pod \"certified-operators-7r7ss\" (UID: \"87d5cfd3-7d03-4ff2-9db4-5228fca84f1c\") " pod="openshift-marketplace/certified-operators-7r7ss" Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.483784 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87d5cfd3-7d03-4ff2-9db4-5228fca84f1c-catalog-content\") pod \"certified-operators-7r7ss\" (UID: \"87d5cfd3-7d03-4ff2-9db4-5228fca84f1c\") " pod="openshift-marketplace/certified-operators-7r7ss" Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.586249 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bn49n\" (UniqueName: \"kubernetes.io/projected/87d5cfd3-7d03-4ff2-9db4-5228fca84f1c-kube-api-access-bn49n\") pod \"certified-operators-7r7ss\" (UID: \"87d5cfd3-7d03-4ff2-9db4-5228fca84f1c\") " pod="openshift-marketplace/certified-operators-7r7ss" Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.586362 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87d5cfd3-7d03-4ff2-9db4-5228fca84f1c-utilities\") pod \"certified-operators-7r7ss\" (UID: \"87d5cfd3-7d03-4ff2-9db4-5228fca84f1c\") " pod="openshift-marketplace/certified-operators-7r7ss" Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.586395 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87d5cfd3-7d03-4ff2-9db4-5228fca84f1c-catalog-content\") pod \"certified-operators-7r7ss\" (UID: \"87d5cfd3-7d03-4ff2-9db4-5228fca84f1c\") " pod="openshift-marketplace/certified-operators-7r7ss" Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.586889 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87d5cfd3-7d03-4ff2-9db4-5228fca84f1c-utilities\") pod \"certified-operators-7r7ss\" (UID: \"87d5cfd3-7d03-4ff2-9db4-5228fca84f1c\") " pod="openshift-marketplace/certified-operators-7r7ss" Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.586934 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87d5cfd3-7d03-4ff2-9db4-5228fca84f1c-catalog-content\") pod \"certified-operators-7r7ss\" (UID: \"87d5cfd3-7d03-4ff2-9db4-5228fca84f1c\") " pod="openshift-marketplace/certified-operators-7r7ss" Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.611195 
4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bn49n\" (UniqueName: \"kubernetes.io/projected/87d5cfd3-7d03-4ff2-9db4-5228fca84f1c-kube-api-access-bn49n\") pod \"certified-operators-7r7ss\" (UID: \"87d5cfd3-7d03-4ff2-9db4-5228fca84f1c\") " pod="openshift-marketplace/certified-operators-7r7ss"
Jan 21 19:08:45 crc kubenswrapper[4799]: I0121 19:08:45.714741 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7r7ss"
Jan 21 19:08:46 crc kubenswrapper[4799]: I0121 19:08:46.519733 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7r7ss"]
Jan 21 19:08:46 crc kubenswrapper[4799]: I0121 19:08:46.717662 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7r7ss" event={"ID":"87d5cfd3-7d03-4ff2-9db4-5228fca84f1c","Type":"ContainerStarted","Data":"91f6e73d3d6e1c7307c024938d57d2c0488faed241be6b47e98874901774a3f1"}
Jan 21 19:08:47 crc kubenswrapper[4799]: I0121 19:08:47.729144 4799 generic.go:334] "Generic (PLEG): container finished" podID="87d5cfd3-7d03-4ff2-9db4-5228fca84f1c" containerID="f72a1b4bbcf03ee00b03ab905609d3438e1eaaa000489a9626c66aa4fa71759c" exitCode=0
Jan 21 19:08:47 crc kubenswrapper[4799]: I0121 19:08:47.729215 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7r7ss" event={"ID":"87d5cfd3-7d03-4ff2-9db4-5228fca84f1c","Type":"ContainerDied","Data":"f72a1b4bbcf03ee00b03ab905609d3438e1eaaa000489a9626c66aa4fa71759c"}
Jan 21 19:08:49 crc kubenswrapper[4799]: I0121 19:08:49.463172 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-nzghk_dd4d0095-1e20-4fcf-937f-1351374f36c6/control-plane-machine-set-operator/0.log"
Jan 21 19:08:49 crc kubenswrapper[4799]: I0121 19:08:49.639426 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-rk6k5_38e1abaa-9da0-4924-a6b5-ee9617cf304d/kube-rbac-proxy/0.log"
Jan 21 19:08:49 crc kubenswrapper[4799]: I0121 19:08:49.692632 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-rk6k5_38e1abaa-9da0-4924-a6b5-ee9617cf304d/machine-api-operator/0.log"
Jan 21 19:08:55 crc kubenswrapper[4799]: I0121 19:08:55.806957 4799 generic.go:334] "Generic (PLEG): container finished" podID="87d5cfd3-7d03-4ff2-9db4-5228fca84f1c" containerID="e2f7efb452631a91e5bdb6b393f10a5d80f9893f9ccaf0c15e3a8a6ff8b36de9" exitCode=0
Jan 21 19:08:55 crc kubenswrapper[4799]: I0121 19:08:55.807669 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7r7ss" event={"ID":"87d5cfd3-7d03-4ff2-9db4-5228fca84f1c","Type":"ContainerDied","Data":"e2f7efb452631a91e5bdb6b393f10a5d80f9893f9ccaf0c15e3a8a6ff8b36de9"}
Jan 21 19:08:55 crc kubenswrapper[4799]: I0121 19:08:55.970753 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 19:08:55 crc kubenswrapper[4799]: I0121 19:08:55.971166 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 19:08:57 crc kubenswrapper[4799]: I0121 19:08:57.842592 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7r7ss" event={"ID":"87d5cfd3-7d03-4ff2-9db4-5228fca84f1c","Type":"ContainerStarted","Data":"9784dc9ef585a84681fbd6a6421e36c8f80c708387b12e7b45b700010334604f"}
Jan 21 19:08:57 crc kubenswrapper[4799]: I0121 19:08:57.868124 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7r7ss" podStartSLOduration=3.593038497 podStartE2EDuration="12.868096262s" podCreationTimestamp="2026-01-21 19:08:45 +0000 UTC" firstStartedPulling="2026-01-21 19:08:47.731123587 +0000 UTC m=+5754.357413620" lastFinishedPulling="2026-01-21 19:08:57.006181362 +0000 UTC m=+5763.632471385" observedRunningTime="2026-01-21 19:08:57.863455321 +0000 UTC m=+5764.489745344" watchObservedRunningTime="2026-01-21 19:08:57.868096262 +0000 UTC m=+5764.494386285"
Jan 21 19:09:05 crc kubenswrapper[4799]: I0121 19:09:05.716044 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7r7ss"
Jan 21 19:09:05 crc kubenswrapper[4799]: I0121 19:09:05.716631 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7r7ss"
Jan 21 19:09:05 crc kubenswrapper[4799]: I0121 19:09:05.774961 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7r7ss"
Jan 21 19:09:05 crc kubenswrapper[4799]: I0121 19:09:05.985754 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7r7ss"
Jan 21 19:09:06 crc kubenswrapper[4799]: I0121 19:09:06.061046 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7r7ss"]
Jan 21 19:09:06 crc kubenswrapper[4799]: I0121 19:09:06.136101 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k7nnk"]
Jan 21 19:09:06 crc kubenswrapper[4799]: I0121 19:09:06.136490 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-k7nnk" podUID="c748650e-351c-4d6c-b16c-6fa29a40a377" containerName="registry-server" containerID="cri-o://555d8039e2c6b81a476c185f145b50dea0cc3b8f0613308e238c3c56f727d96f" gracePeriod=2
Jan 21 19:09:06 crc kubenswrapper[4799]: I0121 19:09:06.186518 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-ct8cl_48e93168-c733-4355-b1b2-5cfd895ed094/cert-manager-controller/0.log"
Jan 21 19:09:06 crc kubenswrapper[4799]: I0121 19:09:06.524736 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-pmjm6_b64d5a15-e3a7-45be-a22f-730946419bd4/cert-manager-cainjector/0.log"
Jan 21 19:09:06 crc kubenswrapper[4799]: I0121 19:09:06.863725 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-xt8bd_de26c870-5c19-414b-9222-c0cd1419550d/cert-manager-webhook/0.log"
Jan 21 19:09:06 crc kubenswrapper[4799]: I0121 19:09:06.982637 4799 generic.go:334] "Generic (PLEG): container finished" podID="c748650e-351c-4d6c-b16c-6fa29a40a377" containerID="555d8039e2c6b81a476c185f145b50dea0cc3b8f0613308e238c3c56f727d96f" exitCode=0
Jan 21 19:09:06 crc kubenswrapper[4799]: I0121 19:09:06.982741 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7nnk" event={"ID":"c748650e-351c-4d6c-b16c-6fa29a40a377","Type":"ContainerDied","Data":"555d8039e2c6b81a476c185f145b50dea0cc3b8f0613308e238c3c56f727d96f"}
Jan 21 19:09:07 crc kubenswrapper[4799]: I0121 19:09:07.179926 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k7nnk"
Jan 21 19:09:07 crc kubenswrapper[4799]: I0121 19:09:07.311153 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c748650e-351c-4d6c-b16c-6fa29a40a377-catalog-content\") pod \"c748650e-351c-4d6c-b16c-6fa29a40a377\" (UID: \"c748650e-351c-4d6c-b16c-6fa29a40a377\") "
Jan 21 19:09:07 crc kubenswrapper[4799]: I0121 19:09:07.311349 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c748650e-351c-4d6c-b16c-6fa29a40a377-utilities\") pod \"c748650e-351c-4d6c-b16c-6fa29a40a377\" (UID: \"c748650e-351c-4d6c-b16c-6fa29a40a377\") "
Jan 21 19:09:07 crc kubenswrapper[4799]: I0121 19:09:07.311448 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-697db\" (UniqueName: \"kubernetes.io/projected/c748650e-351c-4d6c-b16c-6fa29a40a377-kube-api-access-697db\") pod \"c748650e-351c-4d6c-b16c-6fa29a40a377\" (UID: \"c748650e-351c-4d6c-b16c-6fa29a40a377\") "
Jan 21 19:09:07 crc kubenswrapper[4799]: I0121 19:09:07.315212 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c748650e-351c-4d6c-b16c-6fa29a40a377-utilities" (OuterVolumeSpecName: "utilities") pod "c748650e-351c-4d6c-b16c-6fa29a40a377" (UID: "c748650e-351c-4d6c-b16c-6fa29a40a377"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 19:09:07 crc kubenswrapper[4799]: I0121 19:09:07.331290 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c748650e-351c-4d6c-b16c-6fa29a40a377-kube-api-access-697db" (OuterVolumeSpecName: "kube-api-access-697db") pod "c748650e-351c-4d6c-b16c-6fa29a40a377" (UID: "c748650e-351c-4d6c-b16c-6fa29a40a377"). InnerVolumeSpecName "kube-api-access-697db". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 19:09:07 crc kubenswrapper[4799]: I0121 19:09:07.365667 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c748650e-351c-4d6c-b16c-6fa29a40a377-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c748650e-351c-4d6c-b16c-6fa29a40a377" (UID: "c748650e-351c-4d6c-b16c-6fa29a40a377"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 19:09:07 crc kubenswrapper[4799]: I0121 19:09:07.414595 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c748650e-351c-4d6c-b16c-6fa29a40a377-utilities\") on node \"crc\" DevicePath \"\""
Jan 21 19:09:07 crc kubenswrapper[4799]: I0121 19:09:07.414647 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-697db\" (UniqueName: \"kubernetes.io/projected/c748650e-351c-4d6c-b16c-6fa29a40a377-kube-api-access-697db\") on node \"crc\" DevicePath \"\""
Jan 21 19:09:07 crc kubenswrapper[4799]: I0121 19:09:07.414663 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c748650e-351c-4d6c-b16c-6fa29a40a377-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 21 19:09:08 crc kubenswrapper[4799]: I0121 19:09:08.003906 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k7nnk"
Jan 21 19:09:08 crc kubenswrapper[4799]: I0121 19:09:08.004866 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k7nnk" event={"ID":"c748650e-351c-4d6c-b16c-6fa29a40a377","Type":"ContainerDied","Data":"8e6d7a4942e24a09e0dc1a670c7fc64a9d1507ad15a6f5f8e576870080991f0b"}
Jan 21 19:09:08 crc kubenswrapper[4799]: I0121 19:09:08.004942 4799 scope.go:117] "RemoveContainer" containerID="555d8039e2c6b81a476c185f145b50dea0cc3b8f0613308e238c3c56f727d96f"
Jan 21 19:09:08 crc kubenswrapper[4799]: I0121 19:09:08.027039 4799 scope.go:117] "RemoveContainer" containerID="f2565cf586bf049a6605610c718b8dcbffcbe17dcda4a1d00fde81e298e3443b"
Jan 21 19:09:08 crc kubenswrapper[4799]: I0121 19:09:08.064176 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k7nnk"]
Jan 21 19:09:08 crc kubenswrapper[4799]: I0121 19:09:08.071686 4799 scope.go:117] "RemoveContainer" containerID="31bbffaf4f8373f975fd80725197630f67e8d39b93623543b33c0bb84420d38c"
Jan 21 19:09:08 crc kubenswrapper[4799]: I0121 19:09:08.084796 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-k7nnk"]
Jan 21 19:09:08 crc kubenswrapper[4799]: I0121 19:09:08.219470 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c748650e-351c-4d6c-b16c-6fa29a40a377" path="/var/lib/kubelet/pods/c748650e-351c-4d6c-b16c-6fa29a40a377/volumes"
Jan 21 19:09:22 crc kubenswrapper[4799]: I0121 19:09:22.191706 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-84q5f_777bc3b4-a1aa-42ec-8639-f08d14be32b4/nmstate-console-plugin/0.log"
Jan 21 19:09:22 crc kubenswrapper[4799]: I0121 19:09:22.423323 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-xcfs7_f1d83e65-f17f-4802-a0be-536f18cfe6e2/nmstate-handler/0.log"
Jan 21 19:09:22 crc kubenswrapper[4799]: I0121 19:09:22.502078 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-qcsdq_eeedecf6-13c6-4102-a889-a3cec17f120c/kube-rbac-proxy/0.log"
Jan 21 19:09:22 crc kubenswrapper[4799]: I0121 19:09:22.667965 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-qcsdq_eeedecf6-13c6-4102-a889-a3cec17f120c/nmstate-metrics/0.log"
Jan 21 19:09:22 crc kubenswrapper[4799]: I0121 19:09:22.748371 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-njm8b_fd2c58b3-6fc3-4391-8397-c2b1078e48b8/nmstate-operator/0.log"
Jan 21 19:09:22 crc kubenswrapper[4799]: I0121 19:09:22.856105 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-xkl27_9445e980-390c-4759-9dcb-aa2a906f773a/nmstate-webhook/0.log"
Jan 21 19:09:25 crc kubenswrapper[4799]: I0121 19:09:25.970424 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 19:09:25 crc kubenswrapper[4799]: I0121 19:09:25.970858 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 19:09:37 crc kubenswrapper[4799]: I0121 19:09:37.443392 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-58w9k_d743d591-b616-4e57-8395-ef3565083899/prometheus-operator/0.log"
Jan 21 19:09:37 crc kubenswrapper[4799]: I0121 19:09:37.718152 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t_77f855af-53b1-4152-bbff-c818ffa1e32e/prometheus-operator-admission-webhook/0.log"
Jan 21 19:09:37 crc kubenswrapper[4799]: I0121 19:09:37.785001 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf_44710bca-2659-43a9-9454-e12123e0c965/prometheus-operator-admission-webhook/0.log"
Jan 21 19:09:37 crc kubenswrapper[4799]: I0121 19:09:37.917480 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-xrwhp_d38deaee-a893-47a5-b3d5-c1ea392a894b/operator/0.log"
Jan 21 19:09:37 crc kubenswrapper[4799]: I0121 19:09:37.984177 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-w5tlc_3202bb55-0262-452a-9cfe-93088a43c767/perses-operator/0.log"
Jan 21 19:09:52 crc kubenswrapper[4799]: I0121 19:09:52.267897 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-sbtsn_0af2a3ea-da2e-4b99-9486-ce12263a62bf/kube-rbac-proxy/0.log"
Jan 21 19:09:52 crc kubenswrapper[4799]: I0121 19:09:52.430179 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-sbtsn_0af2a3ea-da2e-4b99-9486-ce12263a62bf/controller/0.log"
Jan 21 19:09:52 crc kubenswrapper[4799]: I0121 19:09:52.531215 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-frr-files/0.log"
Jan 21 19:09:52 crc kubenswrapper[4799]: I0121 19:09:52.706462 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-frr-files/0.log"
Jan 21 19:09:52 crc kubenswrapper[4799]: I0121 19:09:52.709608 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-reloader/0.log"
Jan 21 19:09:52 crc kubenswrapper[4799]: I0121 19:09:52.759143 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-metrics/0.log"
Jan 21 19:09:52 crc kubenswrapper[4799]: I0121 19:09:52.761644 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-reloader/0.log"
Jan 21 19:09:52 crc kubenswrapper[4799]: I0121 19:09:52.912959 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-frr-files/0.log"
Jan 21 19:09:52 crc kubenswrapper[4799]: I0121 19:09:52.925843 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-reloader/0.log"
Jan 21 19:09:52 crc kubenswrapper[4799]: I0121 19:09:52.947156 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-metrics/0.log"
Jan 21 19:09:52 crc kubenswrapper[4799]: I0121 19:09:52.959963 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-metrics/0.log"
Jan 21 19:09:53 crc kubenswrapper[4799]: I0121 19:09:53.114508 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-frr-files/0.log"
Jan 21 19:09:53 crc kubenswrapper[4799]: I0121 19:09:53.117465 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-reloader/0.log"
Jan 21 19:09:53 crc kubenswrapper[4799]: I0121 19:09:53.159723 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-metrics/0.log"
Jan 21 19:09:53 crc kubenswrapper[4799]: I0121 19:09:53.164906 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/controller/0.log"
Jan 21 19:09:53 crc kubenswrapper[4799]: I0121 19:09:53.298603 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/frr-metrics/0.log"
Jan 21 19:09:53 crc kubenswrapper[4799]: I0121 19:09:53.384472 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/kube-rbac-proxy/0.log"
Jan 21 19:09:53 crc kubenswrapper[4799]: I0121 19:09:53.388829 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/kube-rbac-proxy-frr/0.log"
Jan 21 19:09:53 crc kubenswrapper[4799]: I0121 19:09:53.537551 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/reloader/0.log"
Jan 21 19:09:53 crc kubenswrapper[4799]: I0121 19:09:53.664884 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-4n47t_1568add4-52bd-4796-87e0-2d9fc9f92324/frr-k8s-webhook-server/0.log"
Jan 21 19:09:53 crc kubenswrapper[4799]: I0121 19:09:53.850520 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-58dfbb9557-5cwxj_7007eeb3-f638-4564-bef1-01c1799f9659/manager/0.log"
Jan 21 19:09:54 crc kubenswrapper[4799]: I0121 19:09:54.049169 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-766568f764-6v2cv_c911a8ad-608f-480c-83b2-672c420e3091/webhook-server/0.log"
Jan 21 19:09:54 crc kubenswrapper[4799]: I0121 19:09:54.178822 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-cdw6h_6c55e902-cf8f-4a8d-ade3-4bd470144d8e/kube-rbac-proxy/0.log"
Jan 21 19:09:54 crc kubenswrapper[4799]: I0121 19:09:54.963073 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-cdw6h_6c55e902-cf8f-4a8d-ade3-4bd470144d8e/speaker/0.log"
Jan 21 19:09:54 crc kubenswrapper[4799]: I0121 19:09:54.981038 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/frr/0.log"
Jan 21 19:09:55 crc kubenswrapper[4799]: I0121 19:09:55.970580 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 19:09:55 crc kubenswrapper[4799]: I0121 19:09:55.970912 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 19:09:55 crc kubenswrapper[4799]: I0121 19:09:55.970964 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s"
Jan 21 19:09:55 crc kubenswrapper[4799]: I0121 19:09:55.971912 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e12abc633fcfdeb9e13a1b3701fb517e8f640918231936c27b04228e02bff853"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 21 19:09:55 crc kubenswrapper[4799]: I0121 19:09:55.971966 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://e12abc633fcfdeb9e13a1b3701fb517e8f640918231936c27b04228e02bff853" gracePeriod=600
Jan 21 19:09:56 crc kubenswrapper[4799]: I0121 19:09:56.486666 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="e12abc633fcfdeb9e13a1b3701fb517e8f640918231936c27b04228e02bff853" exitCode=0
Jan 21 19:09:56 crc kubenswrapper[4799]: I0121 19:09:56.486906 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"e12abc633fcfdeb9e13a1b3701fb517e8f640918231936c27b04228e02bff853"}
Jan 21 19:09:56 crc kubenswrapper[4799]: I0121 19:09:56.487227 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d"}
Jan 21 19:09:56 crc kubenswrapper[4799]: I0121 19:09:56.487252 4799 scope.go:117] "RemoveContainer" containerID="9ce312fdd626ff909b4041c25abcd52172296d044384d5236277c001fe934794"
Jan 21 19:10:07 crc kubenswrapper[4799]: I0121 19:10:07.512221 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf_dfb67070-f383-42b8-bb55-1406f6994a95/util/0.log"
Jan 21 19:10:07 crc kubenswrapper[4799]: I0121 19:10:07.675590 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf_dfb67070-f383-42b8-bb55-1406f6994a95/pull/0.log"
Jan 21 19:10:07 crc kubenswrapper[4799]: I0121 19:10:07.701777 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf_dfb67070-f383-42b8-bb55-1406f6994a95/util/0.log"
Jan 21 19:10:07 crc kubenswrapper[4799]: I0121 19:10:07.739271 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf_dfb67070-f383-42b8-bb55-1406f6994a95/pull/0.log"
Jan 21 19:10:07 crc kubenswrapper[4799]: I0121 19:10:07.901230 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf_dfb67070-f383-42b8-bb55-1406f6994a95/util/0.log"
Jan 21 19:10:07 crc kubenswrapper[4799]: I0121 19:10:07.940829 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf_dfb67070-f383-42b8-bb55-1406f6994a95/pull/0.log"
Jan 21 19:10:07 crc kubenswrapper[4799]: I0121 19:10:07.949450 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf_dfb67070-f383-42b8-bb55-1406f6994a95/extract/0.log"
Jan 21 19:10:08 crc kubenswrapper[4799]: I0121 19:10:08.090420 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc_fd2efc6b-139c-4450-8665-e5d4a013ed30/util/0.log"
Jan 21 19:10:08 crc kubenswrapper[4799]: I0121 19:10:08.255983 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc_fd2efc6b-139c-4450-8665-e5d4a013ed30/util/0.log"
Jan 21 19:10:08 crc kubenswrapper[4799]: I0121 19:10:08.306555 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc_fd2efc6b-139c-4450-8665-e5d4a013ed30/pull/0.log"
Jan 21 19:10:08 crc kubenswrapper[4799]: I0121 19:10:08.322476 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc_fd2efc6b-139c-4450-8665-e5d4a013ed30/pull/0.log"
Jan 21 19:10:08 crc kubenswrapper[4799]: I0121 19:10:08.520371 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc_fd2efc6b-139c-4450-8665-e5d4a013ed30/util/0.log"
Jan 21 19:10:08 crc kubenswrapper[4799]: I0121 19:10:08.524394 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc_fd2efc6b-139c-4450-8665-e5d4a013ed30/extract/0.log"
Jan 21 19:10:08 crc kubenswrapper[4799]: I0121 19:10:08.576036 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc_fd2efc6b-139c-4450-8665-e5d4a013ed30/pull/0.log"
Jan 21 19:10:08 crc kubenswrapper[4799]: I0121 19:10:08.771488 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr_7941bf3d-097c-45f0-a09c-9514ab8f672d/util/0.log"
Jan 21 19:10:08 crc kubenswrapper[4799]: I0121 19:10:08.928949 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr_7941bf3d-097c-45f0-a09c-9514ab8f672d/util/0.log"
Jan 21 19:10:08 crc kubenswrapper[4799]: I0121 19:10:08.946974 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr_7941bf3d-097c-45f0-a09c-9514ab8f672d/pull/0.log"
Jan 21 19:10:08 crc kubenswrapper[4799]: I0121 19:10:08.961585 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr_7941bf3d-097c-45f0-a09c-9514ab8f672d/pull/0.log"
Jan 21 19:10:09 crc kubenswrapper[4799]: I0121 19:10:09.128279 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr_7941bf3d-097c-45f0-a09c-9514ab8f672d/extract/0.log"
Jan 21 19:10:09 crc kubenswrapper[4799]: I0121 19:10:09.138604 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr_7941bf3d-097c-45f0-a09c-9514ab8f672d/util/0.log"
Jan 21 19:10:09 crc kubenswrapper[4799]: I0121 19:10:09.139120 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr_7941bf3d-097c-45f0-a09c-9514ab8f672d/pull/0.log"
Jan 21 19:10:09 crc kubenswrapper[4799]: I0121 19:10:09.339112 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7r7ss_87d5cfd3-7d03-4ff2-9db4-5228fca84f1c/extract-utilities/0.log"
Jan 21 19:10:09 crc kubenswrapper[4799]: I0121 19:10:09.559635 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7r7ss_87d5cfd3-7d03-4ff2-9db4-5228fca84f1c/extract-utilities/0.log"
Jan 21 19:10:09 crc kubenswrapper[4799]: I0121 19:10:09.561390 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7r7ss_87d5cfd3-7d03-4ff2-9db4-5228fca84f1c/extract-content/0.log"
Jan 21 19:10:09 crc kubenswrapper[4799]: I0121 19:10:09.575830 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7r7ss_87d5cfd3-7d03-4ff2-9db4-5228fca84f1c/extract-content/0.log"
Jan 21 19:10:09 crc kubenswrapper[4799]: I0121 19:10:09.741068 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7r7ss_87d5cfd3-7d03-4ff2-9db4-5228fca84f1c/extract-content/0.log"
Jan 21 19:10:09 crc kubenswrapper[4799]: I0121 19:10:09.770275 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7r7ss_87d5cfd3-7d03-4ff2-9db4-5228fca84f1c/extract-utilities/0.log"
Jan 21 19:10:09 crc kubenswrapper[4799]: I0121 19:10:09.885955 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7r7ss_87d5cfd3-7d03-4ff2-9db4-5228fca84f1c/registry-server/0.log"
Jan 21 19:10:09 crc kubenswrapper[4799]: I0121 19:10:09.935010 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fr4rq_1da92736-ae07-4de0-b2a0-2f2fec07749a/extract-utilities/0.log"
Jan 21 19:10:10 crc kubenswrapper[4799]: I0121 19:10:10.201435 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fr4rq_1da92736-ae07-4de0-b2a0-2f2fec07749a/extract-content/0.log"
Jan 21 19:10:10 crc kubenswrapper[4799]: I0121 19:10:10.201541 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fr4rq_1da92736-ae07-4de0-b2a0-2f2fec07749a/extract-utilities/0.log"
Jan 21 19:10:10 crc kubenswrapper[4799]: I0121 19:10:10.203157 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fr4rq_1da92736-ae07-4de0-b2a0-2f2fec07749a/extract-content/0.log"
Jan 21 19:10:10 crc kubenswrapper[4799]: I0121 19:10:10.337826 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fr4rq_1da92736-ae07-4de0-b2a0-2f2fec07749a/extract-utilities/0.log"
Jan 21 19:10:10 crc kubenswrapper[4799]: I0121 19:10:10.427870 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fr4rq_1da92736-ae07-4de0-b2a0-2f2fec07749a/extract-content/0.log"
Jan 21 19:10:10 crc kubenswrapper[4799]: I0121 19:10:10.593923 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-g4vrr_1f389163-50cd-4aaa-9b7c-82358ab47826/marketplace-operator/0.log"
Jan 21 19:10:10 crc kubenswrapper[4799]: I0121 19:10:10.689418 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f9tnv_5cb24916-faef-4a1c-8e2c-c51d108d915e/extract-utilities/0.log"
Jan 21 19:10:10 crc kubenswrapper[4799]: I0121 19:10:10.943981 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f9tnv_5cb24916-faef-4a1c-8e2c-c51d108d915e/extract-content/0.log"
Jan 21 19:10:10 crc kubenswrapper[4799]: I0121 19:10:10.999052 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f9tnv_5cb24916-faef-4a1c-8e2c-c51d108d915e/extract-utilities/0.log"
Jan 21 19:10:11 crc kubenswrapper[4799]: I0121 19:10:11.010499 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f9tnv_5cb24916-faef-4a1c-8e2c-c51d108d915e/extract-content/0.log"
Jan 21 19:10:11 crc kubenswrapper[4799]: I0121 19:10:11.207785 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f9tnv_5cb24916-faef-4a1c-8e2c-c51d108d915e/extract-utilities/0.log"
Jan 21 19:10:11 crc kubenswrapper[4799]: I0121 19:10:11.260108 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fr4rq_1da92736-ae07-4de0-b2a0-2f2fec07749a/registry-server/0.log"
Jan 21 19:10:11 crc kubenswrapper[4799]: I0121 19:10:11.293108 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f9tnv_5cb24916-faef-4a1c-8e2c-c51d108d915e/extract-content/0.log"
Jan 21 19:10:11 crc kubenswrapper[4799]: I0121 19:10:11.488465 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p6ls8_57d3c4d8-2186-406a-bac8-d3b062232299/extract-utilities/0.log"
Jan 21 19:10:11 crc kubenswrapper[4799]: I0121 19:10:11.585084 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f9tnv_5cb24916-faef-4a1c-8e2c-c51d108d915e/registry-server/0.log"
Jan 21 19:10:11 crc kubenswrapper[4799]: I0121 19:10:11.673004 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p6ls8_57d3c4d8-2186-406a-bac8-d3b062232299/extract-content/0.log"
Jan 21 19:10:11 crc kubenswrapper[4799]: I0121 19:10:11.710958 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p6ls8_57d3c4d8-2186-406a-bac8-d3b062232299/extract-utilities/0.log"
Jan 21 19:10:11 crc kubenswrapper[4799]: I0121 19:10:11.756069 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p6ls8_57d3c4d8-2186-406a-bac8-d3b062232299/extract-content/0.log"
Jan 21 19:10:11 crc kubenswrapper[4799]: I0121 19:10:11.901029 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p6ls8_57d3c4d8-2186-406a-bac8-d3b062232299/extract-content/0.log"
Jan 21 19:10:11 crc kubenswrapper[4799]: I0121 19:10:11.905363 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p6ls8_57d3c4d8-2186-406a-bac8-d3b062232299/extract-utilities/0.log"
Jan 21 19:10:12 crc kubenswrapper[4799]: I0121 19:10:12.558091 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p6ls8_57d3c4d8-2186-406a-bac8-d3b062232299/registry-server/0.log"
Jan 21 19:10:25 crc kubenswrapper[4799]: I0121 19:10:25.114999 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-58w9k_d743d591-b616-4e57-8395-ef3565083899/prometheus-operator/0.log"
Jan 21 19:10:25 crc kubenswrapper[4799]: I0121 19:10:25.150811 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t_77f855af-53b1-4152-bbff-c818ffa1e32e/prometheus-operator-admission-webhook/0.log"
Jan 21 19:10:25 crc kubenswrapper[4799]: I0121 19:10:25.177551 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf_44710bca-2659-43a9-9454-e12123e0c965/prometheus-operator-admission-webhook/0.log"
Jan 21 19:10:25 crc kubenswrapper[4799]: I0121 19:10:25.318173 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-xrwhp_d38deaee-a893-47a5-b3d5-c1ea392a894b/operator/0.log"
Jan 21 19:10:25 crc kubenswrapper[4799]: I0121 19:10:25.321085 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-w5tlc_3202bb55-0262-452a-9cfe-93088a43c767/perses-operator/0.log"
Jan 21 19:11:49 crc kubenswrapper[4799]: I0121 19:11:49.873574 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dx2qk"]
Jan 21 19:11:49 crc kubenswrapper[4799]: E0121 19:11:49.874510 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c748650e-351c-4d6c-b16c-6fa29a40a377" containerName="registry-server"
Jan 21 19:11:49 crc kubenswrapper[4799]: I0121 19:11:49.874524 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c748650e-351c-4d6c-b16c-6fa29a40a377" containerName="registry-server"
Jan 21 19:11:49 crc kubenswrapper[4799]: E0121 19:11:49.874544 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c748650e-351c-4d6c-b16c-6fa29a40a377" containerName="extract-utilities"
Jan 21 19:11:49 crc kubenswrapper[4799]: I0121 19:11:49.874552 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c748650e-351c-4d6c-b16c-6fa29a40a377" containerName="extract-utilities"
Jan 21 19:11:49 crc kubenswrapper[4799]: E0121 19:11:49.874563 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c748650e-351c-4d6c-b16c-6fa29a40a377" containerName="extract-content"
Jan 21 19:11:49 crc kubenswrapper[4799]: I0121 19:11:49.874569 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c748650e-351c-4d6c-b16c-6fa29a40a377" containerName="extract-content"
Jan 21 19:11:49 crc kubenswrapper[4799]: I0121 19:11:49.874788 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c748650e-351c-4d6c-b16c-6fa29a40a377" containerName="registry-server"
Jan 21 19:11:49 crc kubenswrapper[4799]: I0121 19:11:49.876318 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:11:49 crc kubenswrapper[4799]: I0121 19:11:49.892279 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dx2qk"]
Jan 21 19:11:49 crc kubenswrapper[4799]: I0121 19:11:49.928652 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jg5dx\" (UniqueName: \"kubernetes.io/projected/60ebc1d5-0eff-4b5c-ab7b-916609b918de-kube-api-access-jg5dx\") pod \"redhat-marketplace-dx2qk\" (UID: \"60ebc1d5-0eff-4b5c-ab7b-916609b918de\") " pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:11:49 crc kubenswrapper[4799]: I0121 19:11:49.929002 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ebc1d5-0eff-4b5c-ab7b-916609b918de-utilities\") pod \"redhat-marketplace-dx2qk\" (UID: \"60ebc1d5-0eff-4b5c-ab7b-916609b918de\") " pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:11:49 crc kubenswrapper[4799]: I0121 19:11:49.929141 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ebc1d5-0eff-4b5c-ab7b-916609b918de-catalog-content\") pod \"redhat-marketplace-dx2qk\" (UID: \"60ebc1d5-0eff-4b5c-ab7b-916609b918de\") " pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:11:50 crc kubenswrapper[4799]: I0121 19:11:50.031085 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ebc1d5-0eff-4b5c-ab7b-916609b918de-utilities\") pod \"redhat-marketplace-dx2qk\" (UID: \"60ebc1d5-0eff-4b5c-ab7b-916609b918de\") " pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:11:50 crc kubenswrapper[4799]: I0121 19:11:50.031212 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ebc1d5-0eff-4b5c-ab7b-916609b918de-catalog-content\") pod \"redhat-marketplace-dx2qk\" (UID: \"60ebc1d5-0eff-4b5c-ab7b-916609b918de\") " pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:11:50 crc kubenswrapper[4799]: I0121 19:11:50.031250 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jg5dx\" (UniqueName: \"kubernetes.io/projected/60ebc1d5-0eff-4b5c-ab7b-916609b918de-kube-api-access-jg5dx\") pod \"redhat-marketplace-dx2qk\" (UID: \"60ebc1d5-0eff-4b5c-ab7b-916609b918de\") " pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:11:50 crc kubenswrapper[4799]: I0121 19:11:50.031881 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ebc1d5-0eff-4b5c-ab7b-916609b918de-utilities\") pod \"redhat-marketplace-dx2qk\" (UID: \"60ebc1d5-0eff-4b5c-ab7b-916609b918de\") " pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:11:50 crc kubenswrapper[4799]: I0121 19:11:50.031930 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ebc1d5-0eff-4b5c-ab7b-916609b918de-catalog-content\") pod \"redhat-marketplace-dx2qk\" (UID: \"60ebc1d5-0eff-4b5c-ab7b-916609b918de\") " pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:11:50 crc kubenswrapper[4799]: I0121 19:11:50.074846 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jg5dx\" (UniqueName: \"kubernetes.io/projected/60ebc1d5-0eff-4b5c-ab7b-916609b918de-kube-api-access-jg5dx\") pod \"redhat-marketplace-dx2qk\" (UID: \"60ebc1d5-0eff-4b5c-ab7b-916609b918de\") " pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:11:50 crc kubenswrapper[4799]: I0121 19:11:50.200386 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:11:50 crc kubenswrapper[4799]: W0121 19:11:50.730294 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60ebc1d5_0eff_4b5c_ab7b_916609b918de.slice/crio-9b5921c5ec6032d2e856f50020dd6dfe984efe74249e750ad070867eb080996d WatchSource:0}: Error finding container 9b5921c5ec6032d2e856f50020dd6dfe984efe74249e750ad070867eb080996d: Status 404 returned error can't find the container with id 9b5921c5ec6032d2e856f50020dd6dfe984efe74249e750ad070867eb080996d
Jan 21 19:11:50 crc kubenswrapper[4799]: I0121 19:11:50.730916 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dx2qk"]
Jan 21 19:11:51 crc kubenswrapper[4799]: I0121 19:11:51.098734 4799 generic.go:334] "Generic (PLEG): container finished" podID="60ebc1d5-0eff-4b5c-ab7b-916609b918de" containerID="abba20ae3a14e0765261d9558b670c3bc542d6ec653c936e77d17f5796ca540f" exitCode=0
Jan 21 19:11:51 crc kubenswrapper[4799]: I0121 19:11:51.099008 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dx2qk" event={"ID":"60ebc1d5-0eff-4b5c-ab7b-916609b918de","Type":"ContainerDied","Data":"abba20ae3a14e0765261d9558b670c3bc542d6ec653c936e77d17f5796ca540f"}
Jan 21 19:11:51 crc kubenswrapper[4799]: I0121 19:11:51.099152 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dx2qk" event={"ID":"60ebc1d5-0eff-4b5c-ab7b-916609b918de","Type":"ContainerStarted","Data":"9b5921c5ec6032d2e856f50020dd6dfe984efe74249e750ad070867eb080996d"}
Jan 21 19:11:51 crc kubenswrapper[4799]: I0121 19:11:51.102853 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 21 19:11:53 crc kubenswrapper[4799]: I0121 19:11:53.120531 4799 generic.go:334] "Generic (PLEG): container finished" podID="60ebc1d5-0eff-4b5c-ab7b-916609b918de" containerID="1c5ceb3d151fd0f6f28d706707217ef5418507252265eff21e4b8df678ab2b63" exitCode=0
Jan 21 19:11:53 crc kubenswrapper[4799]: I0121 19:11:53.120630 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dx2qk" event={"ID":"60ebc1d5-0eff-4b5c-ab7b-916609b918de","Type":"ContainerDied","Data":"1c5ceb3d151fd0f6f28d706707217ef5418507252265eff21e4b8df678ab2b63"}
Jan 21 19:11:54 crc kubenswrapper[4799]: I0121 19:11:54.138015 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dx2qk" event={"ID":"60ebc1d5-0eff-4b5c-ab7b-916609b918de","Type":"ContainerStarted","Data":"b845c0ef1a6e67ce25e1fb45498c4e31a5f909531df85a1bc2c6a61de6e83bc7"}
Jan 21 19:11:54 crc kubenswrapper[4799]: I0121 19:11:54.161039 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dx2qk" podStartSLOduration=2.680739999 podStartE2EDuration="5.161021462s" podCreationTimestamp="2026-01-21 19:11:49 +0000 UTC" firstStartedPulling="2026-01-21 19:11:51.102418343 +0000 UTC m=+5937.728708406" lastFinishedPulling="2026-01-21 19:11:53.582699846 +0000 UTC m=+5940.208989869" observedRunningTime="2026-01-21 19:11:54.16060369 +0000 UTC m=+5940.786893723" watchObservedRunningTime="2026-01-21 19:11:54.161021462 +0000 UTC m=+5940.787311485"
Jan 21 19:12:00 crc kubenswrapper[4799]: I0121 19:12:00.201486 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:12:00 crc kubenswrapper[4799]: I0121 19:12:00.202002 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:12:00 crc kubenswrapper[4799]: I0121 19:12:00.283660 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:12:01 crc kubenswrapper[4799]: I0121 19:12:01.292248 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:12:01 crc kubenswrapper[4799]: I0121 19:12:01.352951 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dx2qk"]
Jan 21 19:12:03 crc kubenswrapper[4799]: I0121 19:12:03.237145 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dx2qk" podUID="60ebc1d5-0eff-4b5c-ab7b-916609b918de" containerName="registry-server" containerID="cri-o://b845c0ef1a6e67ce25e1fb45498c4e31a5f909531df85a1bc2c6a61de6e83bc7" gracePeriod=2
Jan 21 19:12:03 crc kubenswrapper[4799]: I0121 19:12:03.795460 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:12:03 crc kubenswrapper[4799]: I0121 19:12:03.895179 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ebc1d5-0eff-4b5c-ab7b-916609b918de-utilities\") pod \"60ebc1d5-0eff-4b5c-ab7b-916609b918de\" (UID: \"60ebc1d5-0eff-4b5c-ab7b-916609b918de\") "
Jan 21 19:12:03 crc kubenswrapper[4799]: I0121 19:12:03.895251 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60ebc1d5-0eff-4b5c-ab7b-916609b918de-utilities" (OuterVolumeSpecName: "utilities") pod "60ebc1d5-0eff-4b5c-ab7b-916609b918de" (UID: "60ebc1d5-0eff-4b5c-ab7b-916609b918de"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 19:12:03 crc kubenswrapper[4799]: I0121 19:12:03.895326 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jg5dx\" (UniqueName: \"kubernetes.io/projected/60ebc1d5-0eff-4b5c-ab7b-916609b918de-kube-api-access-jg5dx\") pod \"60ebc1d5-0eff-4b5c-ab7b-916609b918de\" (UID: \"60ebc1d5-0eff-4b5c-ab7b-916609b918de\") "
Jan 21 19:12:03 crc kubenswrapper[4799]: I0121 19:12:03.895412 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ebc1d5-0eff-4b5c-ab7b-916609b918de-catalog-content\") pod \"60ebc1d5-0eff-4b5c-ab7b-916609b918de\" (UID: \"60ebc1d5-0eff-4b5c-ab7b-916609b918de\") "
Jan 21 19:12:03 crc kubenswrapper[4799]: I0121 19:12:03.897055 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ebc1d5-0eff-4b5c-ab7b-916609b918de-utilities\") on node \"crc\" DevicePath \"\""
Jan 21 19:12:03 crc kubenswrapper[4799]: I0121 19:12:03.903539 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60ebc1d5-0eff-4b5c-ab7b-916609b918de-kube-api-access-jg5dx" (OuterVolumeSpecName: "kube-api-access-jg5dx") pod "60ebc1d5-0eff-4b5c-ab7b-916609b918de" (UID: "60ebc1d5-0eff-4b5c-ab7b-916609b918de"). InnerVolumeSpecName "kube-api-access-jg5dx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 19:12:03 crc kubenswrapper[4799]: I0121 19:12:03.934844 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60ebc1d5-0eff-4b5c-ab7b-916609b918de-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "60ebc1d5-0eff-4b5c-ab7b-916609b918de" (UID: "60ebc1d5-0eff-4b5c-ab7b-916609b918de"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.000353 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jg5dx\" (UniqueName: \"kubernetes.io/projected/60ebc1d5-0eff-4b5c-ab7b-916609b918de-kube-api-access-jg5dx\") on node \"crc\" DevicePath \"\""
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.000422 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ebc1d5-0eff-4b5c-ab7b-916609b918de-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.250697 4799 generic.go:334] "Generic (PLEG): container finished" podID="60ebc1d5-0eff-4b5c-ab7b-916609b918de" containerID="b845c0ef1a6e67ce25e1fb45498c4e31a5f909531df85a1bc2c6a61de6e83bc7" exitCode=0
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.250794 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dx2qk"
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.250745 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dx2qk" event={"ID":"60ebc1d5-0eff-4b5c-ab7b-916609b918de","Type":"ContainerDied","Data":"b845c0ef1a6e67ce25e1fb45498c4e31a5f909531df85a1bc2c6a61de6e83bc7"}
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.251225 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dx2qk" event={"ID":"60ebc1d5-0eff-4b5c-ab7b-916609b918de","Type":"ContainerDied","Data":"9b5921c5ec6032d2e856f50020dd6dfe984efe74249e750ad070867eb080996d"}
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.251269 4799 scope.go:117] "RemoveContainer" containerID="b845c0ef1a6e67ce25e1fb45498c4e31a5f909531df85a1bc2c6a61de6e83bc7"
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.280609 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dx2qk"]
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.288369 4799 scope.go:117] "RemoveContainer" containerID="1c5ceb3d151fd0f6f28d706707217ef5418507252265eff21e4b8df678ab2b63"
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.292068 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dx2qk"]
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.324505 4799 scope.go:117] "RemoveContainer" containerID="abba20ae3a14e0765261d9558b670c3bc542d6ec653c936e77d17f5796ca540f"
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.375857 4799 scope.go:117] "RemoveContainer" containerID="b845c0ef1a6e67ce25e1fb45498c4e31a5f909531df85a1bc2c6a61de6e83bc7"
Jan 21 19:12:04 crc kubenswrapper[4799]: E0121 19:12:04.376252 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b845c0ef1a6e67ce25e1fb45498c4e31a5f909531df85a1bc2c6a61de6e83bc7\": container with ID starting with b845c0ef1a6e67ce25e1fb45498c4e31a5f909531df85a1bc2c6a61de6e83bc7 not found: ID does not exist" containerID="b845c0ef1a6e67ce25e1fb45498c4e31a5f909531df85a1bc2c6a61de6e83bc7"
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.376324 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b845c0ef1a6e67ce25e1fb45498c4e31a5f909531df85a1bc2c6a61de6e83bc7"} err="failed to get container status \"b845c0ef1a6e67ce25e1fb45498c4e31a5f909531df85a1bc2c6a61de6e83bc7\": rpc error: code = NotFound desc = could not find container \"b845c0ef1a6e67ce25e1fb45498c4e31a5f909531df85a1bc2c6a61de6e83bc7\": container with ID starting with b845c0ef1a6e67ce25e1fb45498c4e31a5f909531df85a1bc2c6a61de6e83bc7 not found: ID does not exist"
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.376377 4799 scope.go:117] "RemoveContainer" containerID="1c5ceb3d151fd0f6f28d706707217ef5418507252265eff21e4b8df678ab2b63"
Jan 21 19:12:04 crc kubenswrapper[4799]: E0121 19:12:04.376871 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c5ceb3d151fd0f6f28d706707217ef5418507252265eff21e4b8df678ab2b63\": container with ID starting with 1c5ceb3d151fd0f6f28d706707217ef5418507252265eff21e4b8df678ab2b63 not found: ID does not exist" containerID="1c5ceb3d151fd0f6f28d706707217ef5418507252265eff21e4b8df678ab2b63"
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.376981 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c5ceb3d151fd0f6f28d706707217ef5418507252265eff21e4b8df678ab2b63"} err="failed to get container status \"1c5ceb3d151fd0f6f28d706707217ef5418507252265eff21e4b8df678ab2b63\": rpc error: code = NotFound desc = could not find container \"1c5ceb3d151fd0f6f28d706707217ef5418507252265eff21e4b8df678ab2b63\": container with ID starting with 1c5ceb3d151fd0f6f28d706707217ef5418507252265eff21e4b8df678ab2b63 not found: ID does not exist"
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.377025 4799 scope.go:117] "RemoveContainer" containerID="abba20ae3a14e0765261d9558b670c3bc542d6ec653c936e77d17f5796ca540f"
Jan 21 19:12:04 crc kubenswrapper[4799]: E0121 19:12:04.377353 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abba20ae3a14e0765261d9558b670c3bc542d6ec653c936e77d17f5796ca540f\": container with ID starting with abba20ae3a14e0765261d9558b670c3bc542d6ec653c936e77d17f5796ca540f not found: ID does not exist" containerID="abba20ae3a14e0765261d9558b670c3bc542d6ec653c936e77d17f5796ca540f"
Jan 21 19:12:04 crc kubenswrapper[4799]: I0121 19:12:04.377558 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abba20ae3a14e0765261d9558b670c3bc542d6ec653c936e77d17f5796ca540f"} err="failed to get container status \"abba20ae3a14e0765261d9558b670c3bc542d6ec653c936e77d17f5796ca540f\": rpc error: code = NotFound desc = could not find container \"abba20ae3a14e0765261d9558b670c3bc542d6ec653c936e77d17f5796ca540f\": container with ID starting with abba20ae3a14e0765261d9558b670c3bc542d6ec653c936e77d17f5796ca540f not found: ID does not exist"
Jan 21 19:12:06 crc kubenswrapper[4799]: I0121 19:12:06.216483 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60ebc1d5-0eff-4b5c-ab7b-916609b918de" path="/var/lib/kubelet/pods/60ebc1d5-0eff-4b5c-ab7b-916609b918de/volumes"
Jan 21 19:12:25 crc kubenswrapper[4799]: I0121 19:12:25.972167 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 19:12:25 crc kubenswrapper[4799]: I0121 19:12:25.972840 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 19:12:36 crc kubenswrapper[4799]: I0121 19:12:36.634384 4799 generic.go:334] "Generic (PLEG): container finished" podID="b3649db7-cfc4-45b4-9638-40680f5fd784" containerID="b4d0b5dc12e1be500a4eab5179819025aa601aaae8ff889ef9facb638d91e619" exitCode=0
Jan 21 19:12:36 crc kubenswrapper[4799]: I0121 19:12:36.634478 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-24c4t/must-gather-tr5nf" event={"ID":"b3649db7-cfc4-45b4-9638-40680f5fd784","Type":"ContainerDied","Data":"b4d0b5dc12e1be500a4eab5179819025aa601aaae8ff889ef9facb638d91e619"}
Jan 21 19:12:36 crc kubenswrapper[4799]: I0121 19:12:36.635676 4799 scope.go:117] "RemoveContainer" containerID="b4d0b5dc12e1be500a4eab5179819025aa601aaae8ff889ef9facb638d91e619"
Jan 21 19:12:36 crc kubenswrapper[4799]: I0121 19:12:36.733930 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-24c4t_must-gather-tr5nf_b3649db7-cfc4-45b4-9638-40680f5fd784/gather/0.log"
Jan 21 19:12:41 crc kubenswrapper[4799]: I0121 19:12:41.949331 4799 scope.go:117] "RemoveContainer" containerID="ef69ae2e51bd85439fdbe0d722116aa73b23c328021054f693d088db99dea163"
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.132777 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-24c4t/must-gather-tr5nf"]
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.134510 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-24c4t/must-gather-tr5nf" podUID="b3649db7-cfc4-45b4-9638-40680f5fd784" containerName="copy" containerID="cri-o://d66c3303a11042d90bdf95c644cadb60f4b22ab517a0eaa4a5b9211263828eb6" gracePeriod=2
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.143434 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-24c4t/must-gather-tr5nf"]
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.561964 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-24c4t_must-gather-tr5nf_b3649db7-cfc4-45b4-9638-40680f5fd784/copy/0.log"
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.562666 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-24c4t/must-gather-tr5nf"
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.674752 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b3649db7-cfc4-45b4-9638-40680f5fd784-must-gather-output\") pod \"b3649db7-cfc4-45b4-9638-40680f5fd784\" (UID: \"b3649db7-cfc4-45b4-9638-40680f5fd784\") "
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.674856 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgc5d\" (UniqueName: \"kubernetes.io/projected/b3649db7-cfc4-45b4-9638-40680f5fd784-kube-api-access-hgc5d\") pod \"b3649db7-cfc4-45b4-9638-40680f5fd784\" (UID: \"b3649db7-cfc4-45b4-9638-40680f5fd784\") "
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.700361 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3649db7-cfc4-45b4-9638-40680f5fd784-kube-api-access-hgc5d" (OuterVolumeSpecName: "kube-api-access-hgc5d") pod "b3649db7-cfc4-45b4-9638-40680f5fd784" (UID: "b3649db7-cfc4-45b4-9638-40680f5fd784"). InnerVolumeSpecName "kube-api-access-hgc5d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.751314 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-24c4t_must-gather-tr5nf_b3649db7-cfc4-45b4-9638-40680f5fd784/copy/0.log"
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.751960 4799 generic.go:334] "Generic (PLEG): container finished" podID="b3649db7-cfc4-45b4-9638-40680f5fd784" containerID="d66c3303a11042d90bdf95c644cadb60f4b22ab517a0eaa4a5b9211263828eb6" exitCode=143
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.752029 4799 scope.go:117] "RemoveContainer" containerID="d66c3303a11042d90bdf95c644cadb60f4b22ab517a0eaa4a5b9211263828eb6"
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.752282 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-24c4t/must-gather-tr5nf"
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.783983 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgc5d\" (UniqueName: \"kubernetes.io/projected/b3649db7-cfc4-45b4-9638-40680f5fd784-kube-api-access-hgc5d\") on node \"crc\" DevicePath \"\""
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.792523 4799 scope.go:117] "RemoveContainer" containerID="b4d0b5dc12e1be500a4eab5179819025aa601aaae8ff889ef9facb638d91e619"
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.854726 4799 scope.go:117] "RemoveContainer" containerID="d66c3303a11042d90bdf95c644cadb60f4b22ab517a0eaa4a5b9211263828eb6"
Jan 21 19:12:45 crc kubenswrapper[4799]: E0121 19:12:45.855237 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d66c3303a11042d90bdf95c644cadb60f4b22ab517a0eaa4a5b9211263828eb6\": container with ID starting with d66c3303a11042d90bdf95c644cadb60f4b22ab517a0eaa4a5b9211263828eb6 not found: ID does not exist" containerID="d66c3303a11042d90bdf95c644cadb60f4b22ab517a0eaa4a5b9211263828eb6"
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.855344 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d66c3303a11042d90bdf95c644cadb60f4b22ab517a0eaa4a5b9211263828eb6"} err="failed to get container status \"d66c3303a11042d90bdf95c644cadb60f4b22ab517a0eaa4a5b9211263828eb6\": rpc error: code = NotFound desc = could not find container \"d66c3303a11042d90bdf95c644cadb60f4b22ab517a0eaa4a5b9211263828eb6\": container with ID starting with d66c3303a11042d90bdf95c644cadb60f4b22ab517a0eaa4a5b9211263828eb6 not found: ID does not exist"
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.855459 4799 scope.go:117] "RemoveContainer" containerID="b4d0b5dc12e1be500a4eab5179819025aa601aaae8ff889ef9facb638d91e619"
Jan 21 19:12:45 crc kubenswrapper[4799]: E0121 19:12:45.855790 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4d0b5dc12e1be500a4eab5179819025aa601aaae8ff889ef9facb638d91e619\": container with ID starting with b4d0b5dc12e1be500a4eab5179819025aa601aaae8ff889ef9facb638d91e619 not found: ID does not exist" containerID="b4d0b5dc12e1be500a4eab5179819025aa601aaae8ff889ef9facb638d91e619"
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.855863 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4d0b5dc12e1be500a4eab5179819025aa601aaae8ff889ef9facb638d91e619"} err="failed to get container status \"b4d0b5dc12e1be500a4eab5179819025aa601aaae8ff889ef9facb638d91e619\": rpc error: code = NotFound desc = could not find container \"b4d0b5dc12e1be500a4eab5179819025aa601aaae8ff889ef9facb638d91e619\": container with ID starting with b4d0b5dc12e1be500a4eab5179819025aa601aaae8ff889ef9facb638d91e619 not found: ID does not exist"
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.915161 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3649db7-cfc4-45b4-9638-40680f5fd784-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "b3649db7-cfc4-45b4-9638-40680f5fd784" (UID: "b3649db7-cfc4-45b4-9638-40680f5fd784"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 19:12:45 crc kubenswrapper[4799]: I0121 19:12:45.990011 4799 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b3649db7-cfc4-45b4-9638-40680f5fd784-must-gather-output\") on node \"crc\" DevicePath \"\""
Jan 21 19:12:46 crc kubenswrapper[4799]: I0121 19:12:46.230466 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3649db7-cfc4-45b4-9638-40680f5fd784" path="/var/lib/kubelet/pods/b3649db7-cfc4-45b4-9638-40680f5fd784/volumes"
Jan 21 19:12:55 crc kubenswrapper[4799]: I0121 19:12:55.970917 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 21 19:12:55 crc kubenswrapper[4799]: I0121 19:12:55.971418 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.510569 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lg6mw"]
Jan 21 19:13:01 crc kubenswrapper[4799]: E0121 19:13:01.511983 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3649db7-cfc4-45b4-9638-40680f5fd784" containerName="gather"
Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.512005 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3649db7-cfc4-45b4-9638-40680f5fd784" containerName="gather"
Jan 21 19:13:01 crc kubenswrapper[4799]: E0121 19:13:01.512037 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60ebc1d5-0eff-4b5c-ab7b-916609b918de" containerName="extract-utilities"
Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.512047 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="60ebc1d5-0eff-4b5c-ab7b-916609b918de" containerName="extract-utilities"
Jan 21 19:13:01 crc kubenswrapper[4799]: E0121 19:13:01.512063 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3649db7-cfc4-45b4-9638-40680f5fd784" containerName="copy"
Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.512073 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3649db7-cfc4-45b4-9638-40680f5fd784" containerName="copy"
Jan 21 19:13:01 crc kubenswrapper[4799]: E0121 19:13:01.512097 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60ebc1d5-0eff-4b5c-ab7b-916609b918de" containerName="registry-server"
Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.512108 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="60ebc1d5-0eff-4b5c-ab7b-916609b918de" containerName="registry-server"
Jan 21 19:13:01 crc kubenswrapper[4799]: E0121 19:13:01.512123 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60ebc1d5-0eff-4b5c-ab7b-916609b918de" containerName="extract-content"
Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.512157 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="60ebc1d5-0eff-4b5c-ab7b-916609b918de" containerName="extract-content"
Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.512806 4799 memory_manager.go:354] "RemoveStaleState
removing state" podUID="b3649db7-cfc4-45b4-9638-40680f5fd784" containerName="gather" Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.512845 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3649db7-cfc4-45b4-9638-40680f5fd784" containerName="copy" Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.512879 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="60ebc1d5-0eff-4b5c-ab7b-916609b918de" containerName="registry-server" Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.516857 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.551296 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lg6mw"] Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.713262 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-catalog-content\") pod \"redhat-operators-lg6mw\" (UID: \"b95c7c75-2769-4a53-a4ab-9507ba7c7c45\") " pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.713351 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-utilities\") pod \"redhat-operators-lg6mw\" (UID: \"b95c7c75-2769-4a53-a4ab-9507ba7c7c45\") " pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.713788 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lrqt\" (UniqueName: \"kubernetes.io/projected/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-kube-api-access-5lrqt\") pod \"redhat-operators-lg6mw\" (UID: \"b95c7c75-2769-4a53-a4ab-9507ba7c7c45\") " pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.815709 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-catalog-content\") pod \"redhat-operators-lg6mw\" (UID: \"b95c7c75-2769-4a53-a4ab-9507ba7c7c45\") " pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.815828 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-utilities\") pod \"redhat-operators-lg6mw\" (UID: \"b95c7c75-2769-4a53-a4ab-9507ba7c7c45\") " pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.815907 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lrqt\" (UniqueName: \"kubernetes.io/projected/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-kube-api-access-5lrqt\") pod \"redhat-operators-lg6mw\" (UID: \"b95c7c75-2769-4a53-a4ab-9507ba7c7c45\") " pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.816405 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-utilities\") pod \"redhat-operators-lg6mw\" (UID: 
\"b95c7c75-2769-4a53-a4ab-9507ba7c7c45\") " pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.816751 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-catalog-content\") pod \"redhat-operators-lg6mw\" (UID: \"b95c7c75-2769-4a53-a4ab-9507ba7c7c45\") " pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.838183 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lrqt\" (UniqueName: \"kubernetes.io/projected/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-kube-api-access-5lrqt\") pod \"redhat-operators-lg6mw\" (UID: \"b95c7c75-2769-4a53-a4ab-9507ba7c7c45\") " pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:01 crc kubenswrapper[4799]: I0121 19:13:01.843703 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:02 crc kubenswrapper[4799]: I0121 19:13:02.413809 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lg6mw"] Jan 21 19:13:02 crc kubenswrapper[4799]: I0121 19:13:02.920408 4799 generic.go:334] "Generic (PLEG): container finished" podID="b95c7c75-2769-4a53-a4ab-9507ba7c7c45" containerID="52a600bffd0025520defd38445ead3c9f85f9a4be32acee87e9308ea291a5803" exitCode=0 Jan 21 19:13:02 crc kubenswrapper[4799]: I0121 19:13:02.920488 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lg6mw" event={"ID":"b95c7c75-2769-4a53-a4ab-9507ba7c7c45","Type":"ContainerDied","Data":"52a600bffd0025520defd38445ead3c9f85f9a4be32acee87e9308ea291a5803"} Jan 21 19:13:02 crc kubenswrapper[4799]: I0121 19:13:02.920561 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lg6mw" event={"ID":"b95c7c75-2769-4a53-a4ab-9507ba7c7c45","Type":"ContainerStarted","Data":"946dbbcd8f1e4f6b8a85e3d9f10ff178408d7398dfbf2ea375f630a7dc59facb"} Jan 21 19:13:04 crc kubenswrapper[4799]: I0121 19:13:04.948671 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lg6mw" event={"ID":"b95c7c75-2769-4a53-a4ab-9507ba7c7c45","Type":"ContainerStarted","Data":"54a696a670e617e0bab124acdcc48525af20621861943829e6237dae4fa505bd"} Jan 21 19:13:06 crc kubenswrapper[4799]: I0121 19:13:06.979153 4799 generic.go:334] "Generic (PLEG): container finished" podID="b95c7c75-2769-4a53-a4ab-9507ba7c7c45" containerID="54a696a670e617e0bab124acdcc48525af20621861943829e6237dae4fa505bd" exitCode=0 Jan 21 19:13:06 crc kubenswrapper[4799]: I0121 19:13:06.979211 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lg6mw" event={"ID":"b95c7c75-2769-4a53-a4ab-9507ba7c7c45","Type":"ContainerDied","Data":"54a696a670e617e0bab124acdcc48525af20621861943829e6237dae4fa505bd"} Jan 21 19:13:07 crc kubenswrapper[4799]: I0121 19:13:07.991046 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lg6mw" event={"ID":"b95c7c75-2769-4a53-a4ab-9507ba7c7c45","Type":"ContainerStarted","Data":"185b5d7701acda1fe72c1322dedcce64e584d45d62469dd4c410be227950336f"} Jan 21 19:13:11 crc kubenswrapper[4799]: I0121 19:13:11.844301 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:11 crc 
kubenswrapper[4799]: I0121 19:13:11.844596 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:12 crc kubenswrapper[4799]: I0121 19:13:12.888690 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lg6mw" podUID="b95c7c75-2769-4a53-a4ab-9507ba7c7c45" containerName="registry-server" probeResult="failure" output=< Jan 21 19:13:12 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s Jan 21 19:13:12 crc kubenswrapper[4799]: > Jan 21 19:13:21 crc kubenswrapper[4799]: I0121 19:13:21.955776 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:22 crc kubenswrapper[4799]: I0121 19:13:22.039796 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lg6mw" podStartSLOduration=16.349640396 podStartE2EDuration="21.039766511s" podCreationTimestamp="2026-01-21 19:13:01 +0000 UTC" firstStartedPulling="2026-01-21 19:13:02.922611864 +0000 UTC m=+6009.548901887" lastFinishedPulling="2026-01-21 19:13:07.612737969 +0000 UTC m=+6014.239028002" observedRunningTime="2026-01-21 19:13:08.013741922 +0000 UTC m=+6014.640031945" watchObservedRunningTime="2026-01-21 19:13:22.039766511 +0000 UTC m=+6028.666056544" Jan 21 19:13:22 crc kubenswrapper[4799]: I0121 19:13:22.063591 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:22 crc kubenswrapper[4799]: I0121 19:13:22.217031 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lg6mw"] Jan 21 19:13:23 crc kubenswrapper[4799]: I0121 19:13:23.145424 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lg6mw" podUID="b95c7c75-2769-4a53-a4ab-9507ba7c7c45" containerName="registry-server" containerID="cri-o://185b5d7701acda1fe72c1322dedcce64e584d45d62469dd4c410be227950336f" gracePeriod=2 Jan 21 19:13:23 crc kubenswrapper[4799]: I0121 19:13:23.648402 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:23 crc kubenswrapper[4799]: I0121 19:13:23.723884 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-catalog-content\") pod \"b95c7c75-2769-4a53-a4ab-9507ba7c7c45\" (UID: \"b95c7c75-2769-4a53-a4ab-9507ba7c7c45\") " Jan 21 19:13:23 crc kubenswrapper[4799]: I0121 19:13:23.723975 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-utilities\") pod \"b95c7c75-2769-4a53-a4ab-9507ba7c7c45\" (UID: \"b95c7c75-2769-4a53-a4ab-9507ba7c7c45\") " Jan 21 19:13:23 crc kubenswrapper[4799]: I0121 19:13:23.724307 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lrqt\" (UniqueName: \"kubernetes.io/projected/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-kube-api-access-5lrqt\") pod \"b95c7c75-2769-4a53-a4ab-9507ba7c7c45\" (UID: \"b95c7c75-2769-4a53-a4ab-9507ba7c7c45\") " Jan 21 19:13:23 crc kubenswrapper[4799]: I0121 19:13:23.724963 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-utilities" (OuterVolumeSpecName: "utilities") pod "b95c7c75-2769-4a53-a4ab-9507ba7c7c45" (UID: "b95c7c75-2769-4a53-a4ab-9507ba7c7c45"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:13:23 crc kubenswrapper[4799]: I0121 19:13:23.733686 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-kube-api-access-5lrqt" (OuterVolumeSpecName: "kube-api-access-5lrqt") pod "b95c7c75-2769-4a53-a4ab-9507ba7c7c45" (UID: "b95c7c75-2769-4a53-a4ab-9507ba7c7c45"). InnerVolumeSpecName "kube-api-access-5lrqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:13:23 crc kubenswrapper[4799]: I0121 19:13:23.826830 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lrqt\" (UniqueName: \"kubernetes.io/projected/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-kube-api-access-5lrqt\") on node \"crc\" DevicePath \"\"" Jan 21 19:13:23 crc kubenswrapper[4799]: I0121 19:13:23.826864 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 19:13:23 crc kubenswrapper[4799]: I0121 19:13:23.848542 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b95c7c75-2769-4a53-a4ab-9507ba7c7c45" (UID: "b95c7c75-2769-4a53-a4ab-9507ba7c7c45"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:13:23 crc kubenswrapper[4799]: I0121 19:13:23.929496 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b95c7c75-2769-4a53-a4ab-9507ba7c7c45-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 19:13:24 crc kubenswrapper[4799]: I0121 19:13:24.157622 4799 generic.go:334] "Generic (PLEG): container finished" podID="b95c7c75-2769-4a53-a4ab-9507ba7c7c45" containerID="185b5d7701acda1fe72c1322dedcce64e584d45d62469dd4c410be227950336f" exitCode=0 Jan 21 19:13:24 crc kubenswrapper[4799]: I0121 19:13:24.157681 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lg6mw" event={"ID":"b95c7c75-2769-4a53-a4ab-9507ba7c7c45","Type":"ContainerDied","Data":"185b5d7701acda1fe72c1322dedcce64e584d45d62469dd4c410be227950336f"} Jan 21 19:13:24 crc kubenswrapper[4799]: I0121 19:13:24.157741 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lg6mw" event={"ID":"b95c7c75-2769-4a53-a4ab-9507ba7c7c45","Type":"ContainerDied","Data":"946dbbcd8f1e4f6b8a85e3d9f10ff178408d7398dfbf2ea375f630a7dc59facb"} Jan 21 19:13:24 crc kubenswrapper[4799]: I0121 19:13:24.157764 4799 scope.go:117] "RemoveContainer" containerID="185b5d7701acda1fe72c1322dedcce64e584d45d62469dd4c410be227950336f" Jan 21 19:13:24 crc kubenswrapper[4799]: I0121 19:13:24.157708 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lg6mw" Jan 21 19:13:24 crc kubenswrapper[4799]: I0121 19:13:24.197110 4799 scope.go:117] "RemoveContainer" containerID="54a696a670e617e0bab124acdcc48525af20621861943829e6237dae4fa505bd" Jan 21 19:13:24 crc kubenswrapper[4799]: I0121 19:13:24.199330 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lg6mw"] Jan 21 19:13:24 crc kubenswrapper[4799]: I0121 19:13:24.222710 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lg6mw"] Jan 21 19:13:24 crc kubenswrapper[4799]: I0121 19:13:24.240932 4799 scope.go:117] "RemoveContainer" containerID="52a600bffd0025520defd38445ead3c9f85f9a4be32acee87e9308ea291a5803" Jan 21 19:13:24 crc kubenswrapper[4799]: I0121 19:13:24.283337 4799 scope.go:117] "RemoveContainer" containerID="185b5d7701acda1fe72c1322dedcce64e584d45d62469dd4c410be227950336f" Jan 21 19:13:24 crc kubenswrapper[4799]: E0121 19:13:24.284427 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"185b5d7701acda1fe72c1322dedcce64e584d45d62469dd4c410be227950336f\": container with ID starting with 185b5d7701acda1fe72c1322dedcce64e584d45d62469dd4c410be227950336f not found: ID does not exist" containerID="185b5d7701acda1fe72c1322dedcce64e584d45d62469dd4c410be227950336f" Jan 21 19:13:24 crc kubenswrapper[4799]: I0121 19:13:24.284486 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"185b5d7701acda1fe72c1322dedcce64e584d45d62469dd4c410be227950336f"} err="failed to get container status \"185b5d7701acda1fe72c1322dedcce64e584d45d62469dd4c410be227950336f\": rpc error: code = NotFound desc = could not find container \"185b5d7701acda1fe72c1322dedcce64e584d45d62469dd4c410be227950336f\": container with ID starting with 185b5d7701acda1fe72c1322dedcce64e584d45d62469dd4c410be227950336f not found: ID does not exist" Jan 21 19:13:24 crc 
kubenswrapper[4799]: I0121 19:13:24.284523 4799 scope.go:117] "RemoveContainer" containerID="54a696a670e617e0bab124acdcc48525af20621861943829e6237dae4fa505bd" Jan 21 19:13:24 crc kubenswrapper[4799]: E0121 19:13:24.284967 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54a696a670e617e0bab124acdcc48525af20621861943829e6237dae4fa505bd\": container with ID starting with 54a696a670e617e0bab124acdcc48525af20621861943829e6237dae4fa505bd not found: ID does not exist" containerID="54a696a670e617e0bab124acdcc48525af20621861943829e6237dae4fa505bd" Jan 21 19:13:24 crc kubenswrapper[4799]: I0121 19:13:24.284993 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54a696a670e617e0bab124acdcc48525af20621861943829e6237dae4fa505bd"} err="failed to get container status \"54a696a670e617e0bab124acdcc48525af20621861943829e6237dae4fa505bd\": rpc error: code = NotFound desc = could not find container \"54a696a670e617e0bab124acdcc48525af20621861943829e6237dae4fa505bd\": container with ID starting with 54a696a670e617e0bab124acdcc48525af20621861943829e6237dae4fa505bd not found: ID does not exist" Jan 21 19:13:24 crc kubenswrapper[4799]: I0121 19:13:24.285014 4799 scope.go:117] "RemoveContainer" containerID="52a600bffd0025520defd38445ead3c9f85f9a4be32acee87e9308ea291a5803" Jan 21 19:13:24 crc kubenswrapper[4799]: E0121 19:13:24.285437 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52a600bffd0025520defd38445ead3c9f85f9a4be32acee87e9308ea291a5803\": container with ID starting with 52a600bffd0025520defd38445ead3c9f85f9a4be32acee87e9308ea291a5803 not found: ID does not exist" containerID="52a600bffd0025520defd38445ead3c9f85f9a4be32acee87e9308ea291a5803" Jan 21 19:13:24 crc kubenswrapper[4799]: I0121 19:13:24.285468 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52a600bffd0025520defd38445ead3c9f85f9a4be32acee87e9308ea291a5803"} err="failed to get container status \"52a600bffd0025520defd38445ead3c9f85f9a4be32acee87e9308ea291a5803\": rpc error: code = NotFound desc = could not find container \"52a600bffd0025520defd38445ead3c9f85f9a4be32acee87e9308ea291a5803\": container with ID starting with 52a600bffd0025520defd38445ead3c9f85f9a4be32acee87e9308ea291a5803 not found: ID does not exist" Jan 21 19:13:25 crc kubenswrapper[4799]: I0121 19:13:25.971090 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 19:13:25 crc kubenswrapper[4799]: I0121 19:13:25.971429 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 19:13:25 crc kubenswrapper[4799]: I0121 19:13:25.971489 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 19:13:25 crc kubenswrapper[4799]: I0121 19:13:25.972371 4799 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 19:13:25 crc kubenswrapper[4799]: I0121 19:13:25.972425 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" gracePeriod=600 Jan 21 19:13:26 crc kubenswrapper[4799]: E0121 19:13:26.112371 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:13:26 crc kubenswrapper[4799]: I0121 19:13:26.179014 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" exitCode=0 Jan 21 19:13:26 crc kubenswrapper[4799]: I0121 19:13:26.179068 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d"} Jan 21 19:13:26 crc kubenswrapper[4799]: I0121 19:13:26.179121 4799 scope.go:117] "RemoveContainer" containerID="e12abc633fcfdeb9e13a1b3701fb517e8f640918231936c27b04228e02bff853" Jan 21 19:13:26 crc kubenswrapper[4799]: I0121 19:13:26.179982 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:13:26 crc kubenswrapper[4799]: E0121 19:13:26.180335 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:13:26 crc kubenswrapper[4799]: I0121 19:13:26.221403 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b95c7c75-2769-4a53-a4ab-9507ba7c7c45" path="/var/lib/kubelet/pods/b95c7c75-2769-4a53-a4ab-9507ba7c7c45/volumes" Jan 21 19:13:39 crc kubenswrapper[4799]: I0121 19:13:39.206301 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:13:39 crc kubenswrapper[4799]: E0121 19:13:39.207488 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" 
Jan 21 19:13:42 crc kubenswrapper[4799]: I0121 19:13:42.032221 4799 scope.go:117] "RemoveContainer" containerID="1ef601f3be665c2c6958d796b8871c621dbd4688508a54828098c2ee08ff609a" Jan 21 19:13:52 crc kubenswrapper[4799]: I0121 19:13:52.205242 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:13:52 crc kubenswrapper[4799]: E0121 19:13:52.206091 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:14:06 crc kubenswrapper[4799]: I0121 19:14:06.205246 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:14:06 crc kubenswrapper[4799]: E0121 19:14:06.206034 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:14:17 crc kubenswrapper[4799]: I0121 19:14:17.205603 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:14:17 crc kubenswrapper[4799]: E0121 19:14:17.206691 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:14:28 crc kubenswrapper[4799]: I0121 19:14:28.206161 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:14:28 crc kubenswrapper[4799]: E0121 19:14:28.207063 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:14:42 crc kubenswrapper[4799]: I0121 19:14:42.205965 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:14:42 crc kubenswrapper[4799]: E0121 19:14:42.207635 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:14:56 
crc kubenswrapper[4799]: I0121 19:14:56.205794 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:14:56 crc kubenswrapper[4799]: E0121 19:14:56.206597 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.154389 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5"] Jan 21 19:15:00 crc kubenswrapper[4799]: E0121 19:15:00.155513 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b95c7c75-2769-4a53-a4ab-9507ba7c7c45" containerName="extract-utilities" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.155530 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b95c7c75-2769-4a53-a4ab-9507ba7c7c45" containerName="extract-utilities" Jan 21 19:15:00 crc kubenswrapper[4799]: E0121 19:15:00.155565 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b95c7c75-2769-4a53-a4ab-9507ba7c7c45" containerName="extract-content" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.155575 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b95c7c75-2769-4a53-a4ab-9507ba7c7c45" containerName="extract-content" Jan 21 19:15:00 crc kubenswrapper[4799]: E0121 19:15:00.155607 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b95c7c75-2769-4a53-a4ab-9507ba7c7c45" containerName="registry-server" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.155614 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="b95c7c75-2769-4a53-a4ab-9507ba7c7c45" containerName="registry-server" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.155880 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="b95c7c75-2769-4a53-a4ab-9507ba7c7c45" containerName="registry-server" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.156704 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.159612 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.162022 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.201478 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/58d3feb7-4438-4453-ab1e-a12ad5292f7a-secret-volume\") pod \"collect-profiles-29483715-fpmh5\" (UID: \"58d3feb7-4438-4453-ab1e-a12ad5292f7a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.202384 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/58d3feb7-4438-4453-ab1e-a12ad5292f7a-config-volume\") pod \"collect-profiles-29483715-fpmh5\" (UID: \"58d3feb7-4438-4453-ab1e-a12ad5292f7a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.202497 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6rm5\" (UniqueName: \"kubernetes.io/projected/58d3feb7-4438-4453-ab1e-a12ad5292f7a-kube-api-access-z6rm5\") pod \"collect-profiles-29483715-fpmh5\" (UID: \"58d3feb7-4438-4453-ab1e-a12ad5292f7a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.204450 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5"] Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.304787 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/58d3feb7-4438-4453-ab1e-a12ad5292f7a-config-volume\") pod \"collect-profiles-29483715-fpmh5\" (UID: \"58d3feb7-4438-4453-ab1e-a12ad5292f7a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.305146 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6rm5\" (UniqueName: \"kubernetes.io/projected/58d3feb7-4438-4453-ab1e-a12ad5292f7a-kube-api-access-z6rm5\") pod \"collect-profiles-29483715-fpmh5\" (UID: \"58d3feb7-4438-4453-ab1e-a12ad5292f7a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.305238 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/58d3feb7-4438-4453-ab1e-a12ad5292f7a-secret-volume\") pod \"collect-profiles-29483715-fpmh5\" (UID: \"58d3feb7-4438-4453-ab1e-a12ad5292f7a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.305695 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/58d3feb7-4438-4453-ab1e-a12ad5292f7a-config-volume\") pod 
\"collect-profiles-29483715-fpmh5\" (UID: \"58d3feb7-4438-4453-ab1e-a12ad5292f7a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.312137 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/58d3feb7-4438-4453-ab1e-a12ad5292f7a-secret-volume\") pod \"collect-profiles-29483715-fpmh5\" (UID: \"58d3feb7-4438-4453-ab1e-a12ad5292f7a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.323382 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6rm5\" (UniqueName: \"kubernetes.io/projected/58d3feb7-4438-4453-ab1e-a12ad5292f7a-kube-api-access-z6rm5\") pod \"collect-profiles-29483715-fpmh5\" (UID: \"58d3feb7-4438-4453-ab1e-a12ad5292f7a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" Jan 21 19:15:00 crc kubenswrapper[4799]: I0121 19:15:00.516982 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" Jan 21 19:15:01 crc kubenswrapper[4799]: W0121 19:15:01.026268 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58d3feb7_4438_4453_ab1e_a12ad5292f7a.slice/crio-881cb9b798758e884ece5976e11b8008dec8d3e102cbb8b6b5813ed0b0985f5b WatchSource:0}: Error finding container 881cb9b798758e884ece5976e11b8008dec8d3e102cbb8b6b5813ed0b0985f5b: Status 404 returned error can't find the container with id 881cb9b798758e884ece5976e11b8008dec8d3e102cbb8b6b5813ed0b0985f5b Jan 21 19:15:01 crc kubenswrapper[4799]: I0121 19:15:01.027182 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5"] Jan 21 19:15:01 crc kubenswrapper[4799]: I0121 19:15:01.366352 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" event={"ID":"58d3feb7-4438-4453-ab1e-a12ad5292f7a","Type":"ContainerStarted","Data":"84a57e38eae85ff86d91be2705fe157fa7cfb441869c54696ac9ac6f88757854"} Jan 21 19:15:01 crc kubenswrapper[4799]: I0121 19:15:01.366728 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" event={"ID":"58d3feb7-4438-4453-ab1e-a12ad5292f7a","Type":"ContainerStarted","Data":"881cb9b798758e884ece5976e11b8008dec8d3e102cbb8b6b5813ed0b0985f5b"} Jan 21 19:15:01 crc kubenswrapper[4799]: I0121 19:15:01.397327 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" podStartSLOduration=1.397306142 podStartE2EDuration="1.397306142s" podCreationTimestamp="2026-01-21 19:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 19:15:01.3865969 +0000 UTC m=+6128.012886953" watchObservedRunningTime="2026-01-21 19:15:01.397306142 +0000 UTC m=+6128.023596165" Jan 21 19:15:02 crc kubenswrapper[4799]: I0121 19:15:02.380425 4799 generic.go:334] "Generic (PLEG): container finished" podID="58d3feb7-4438-4453-ab1e-a12ad5292f7a" containerID="84a57e38eae85ff86d91be2705fe157fa7cfb441869c54696ac9ac6f88757854" exitCode=0 Jan 21 19:15:02 crc kubenswrapper[4799]: I0121 19:15:02.380486 
4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" event={"ID":"58d3feb7-4438-4453-ab1e-a12ad5292f7a","Type":"ContainerDied","Data":"84a57e38eae85ff86d91be2705fe157fa7cfb441869c54696ac9ac6f88757854"} Jan 21 19:15:03 crc kubenswrapper[4799]: I0121 19:15:03.761775 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" Jan 21 19:15:03 crc kubenswrapper[4799]: I0121 19:15:03.944041 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6rm5\" (UniqueName: \"kubernetes.io/projected/58d3feb7-4438-4453-ab1e-a12ad5292f7a-kube-api-access-z6rm5\") pod \"58d3feb7-4438-4453-ab1e-a12ad5292f7a\" (UID: \"58d3feb7-4438-4453-ab1e-a12ad5292f7a\") " Jan 21 19:15:03 crc kubenswrapper[4799]: I0121 19:15:03.944367 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/58d3feb7-4438-4453-ab1e-a12ad5292f7a-config-volume\") pod \"58d3feb7-4438-4453-ab1e-a12ad5292f7a\" (UID: \"58d3feb7-4438-4453-ab1e-a12ad5292f7a\") " Jan 21 19:15:03 crc kubenswrapper[4799]: I0121 19:15:03.944442 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/58d3feb7-4438-4453-ab1e-a12ad5292f7a-secret-volume\") pod \"58d3feb7-4438-4453-ab1e-a12ad5292f7a\" (UID: \"58d3feb7-4438-4453-ab1e-a12ad5292f7a\") " Jan 21 19:15:03 crc kubenswrapper[4799]: I0121 19:15:03.945220 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58d3feb7-4438-4453-ab1e-a12ad5292f7a-config-volume" (OuterVolumeSpecName: "config-volume") pod "58d3feb7-4438-4453-ab1e-a12ad5292f7a" (UID: "58d3feb7-4438-4453-ab1e-a12ad5292f7a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 21 19:15:03 crc kubenswrapper[4799]: I0121 19:15:03.952289 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58d3feb7-4438-4453-ab1e-a12ad5292f7a-kube-api-access-z6rm5" (OuterVolumeSpecName: "kube-api-access-z6rm5") pod "58d3feb7-4438-4453-ab1e-a12ad5292f7a" (UID: "58d3feb7-4438-4453-ab1e-a12ad5292f7a"). InnerVolumeSpecName "kube-api-access-z6rm5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:15:03 crc kubenswrapper[4799]: I0121 19:15:03.954545 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58d3feb7-4438-4453-ab1e-a12ad5292f7a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "58d3feb7-4438-4453-ab1e-a12ad5292f7a" (UID: "58d3feb7-4438-4453-ab1e-a12ad5292f7a"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 21 19:15:04 crc kubenswrapper[4799]: I0121 19:15:04.047068 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6rm5\" (UniqueName: \"kubernetes.io/projected/58d3feb7-4438-4453-ab1e-a12ad5292f7a-kube-api-access-z6rm5\") on node \"crc\" DevicePath \"\"" Jan 21 19:15:04 crc kubenswrapper[4799]: I0121 19:15:04.047099 4799 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/58d3feb7-4438-4453-ab1e-a12ad5292f7a-config-volume\") on node \"crc\" DevicePath \"\"" Jan 21 19:15:04 crc kubenswrapper[4799]: I0121 19:15:04.047109 4799 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/58d3feb7-4438-4453-ab1e-a12ad5292f7a-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 21 19:15:04 crc kubenswrapper[4799]: I0121 19:15:04.402911 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" event={"ID":"58d3feb7-4438-4453-ab1e-a12ad5292f7a","Type":"ContainerDied","Data":"881cb9b798758e884ece5976e11b8008dec8d3e102cbb8b6b5813ed0b0985f5b"} Jan 21 19:15:04 crc kubenswrapper[4799]: I0121 19:15:04.403275 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="881cb9b798758e884ece5976e11b8008dec8d3e102cbb8b6b5813ed0b0985f5b" Jan 21 19:15:04 crc kubenswrapper[4799]: I0121 19:15:04.403021 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29483715-fpmh5" Jan 21 19:15:04 crc kubenswrapper[4799]: I0121 19:15:04.467084 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk"] Jan 21 19:15:04 crc kubenswrapper[4799]: I0121 19:15:04.477558 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29483670-q9tnk"] Jan 21 19:15:06 crc kubenswrapper[4799]: I0121 19:15:06.219644 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcebec36-4bde-4f86-9253-47e839e4011e" path="/var/lib/kubelet/pods/bcebec36-4bde-4f86-9253-47e839e4011e/volumes" Jan 21 19:15:09 crc kubenswrapper[4799]: I0121 19:15:09.205466 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:15:09 crc kubenswrapper[4799]: E0121 19:15:09.206164 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:15:21 crc kubenswrapper[4799]: I0121 19:15:21.209859 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:15:21 crc kubenswrapper[4799]: E0121 19:15:21.211288 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:15:36 crc kubenswrapper[4799]: I0121 19:15:36.206369 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:15:36 crc kubenswrapper[4799]: E0121 19:15:36.207867 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:15:42 crc kubenswrapper[4799]: I0121 19:15:42.168493 4799 scope.go:117] "RemoveContainer" containerID="c7a7177076137e8f2ff56f34d3481b9ec538925b10d6c1a0fef1c77c97b53b0e" Jan 21 19:15:47 crc kubenswrapper[4799]: I0121 19:15:47.205532 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:15:47 crc kubenswrapper[4799]: E0121 19:15:47.206478 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:15:58 crc kubenswrapper[4799]: I0121 19:15:58.204723 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:15:58 crc kubenswrapper[4799]: E0121 19:15:58.205542 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:16:10 crc kubenswrapper[4799]: I0121 19:16:10.205371 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:16:10 crc kubenswrapper[4799]: E0121 19:16:10.207287 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:16:16 crc kubenswrapper[4799]: I0121 19:16:16.048382 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-g6g7f/must-gather-tvtrx"] Jan 21 19:16:16 crc kubenswrapper[4799]: E0121 19:16:16.061662 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58d3feb7-4438-4453-ab1e-a12ad5292f7a" containerName="collect-profiles" Jan 21 19:16:16 crc kubenswrapper[4799]: I0121 19:16:16.061698 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="58d3feb7-4438-4453-ab1e-a12ad5292f7a" containerName="collect-profiles" Jan 21 
19:16:16 crc kubenswrapper[4799]: I0121 19:16:16.061998 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="58d3feb7-4438-4453-ab1e-a12ad5292f7a" containerName="collect-profiles" Jan 21 19:16:16 crc kubenswrapper[4799]: I0121 19:16:16.063259 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-g6g7f/must-gather-tvtrx" Jan 21 19:16:16 crc kubenswrapper[4799]: I0121 19:16:16.067317 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-g6g7f"/"kube-root-ca.crt" Jan 21 19:16:16 crc kubenswrapper[4799]: I0121 19:16:16.067361 4799 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-g6g7f"/"default-dockercfg-2cpb6" Jan 21 19:16:16 crc kubenswrapper[4799]: I0121 19:16:16.067538 4799 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-g6g7f"/"openshift-service-ca.crt" Jan 21 19:16:16 crc kubenswrapper[4799]: I0121 19:16:16.097903 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-g6g7f/must-gather-tvtrx"] Jan 21 19:16:16 crc kubenswrapper[4799]: I0121 19:16:16.273297 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4f422c5e-e37c-49ba-b445-fd8f178ee3e3-must-gather-output\") pod \"must-gather-tvtrx\" (UID: \"4f422c5e-e37c-49ba-b445-fd8f178ee3e3\") " pod="openshift-must-gather-g6g7f/must-gather-tvtrx" Jan 21 19:16:16 crc kubenswrapper[4799]: I0121 19:16:16.273648 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz666\" (UniqueName: \"kubernetes.io/projected/4f422c5e-e37c-49ba-b445-fd8f178ee3e3-kube-api-access-wz666\") pod \"must-gather-tvtrx\" (UID: \"4f422c5e-e37c-49ba-b445-fd8f178ee3e3\") " pod="openshift-must-gather-g6g7f/must-gather-tvtrx" Jan 21 19:16:16 crc kubenswrapper[4799]: I0121 19:16:16.380748 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz666\" (UniqueName: \"kubernetes.io/projected/4f422c5e-e37c-49ba-b445-fd8f178ee3e3-kube-api-access-wz666\") pod \"must-gather-tvtrx\" (UID: \"4f422c5e-e37c-49ba-b445-fd8f178ee3e3\") " pod="openshift-must-gather-g6g7f/must-gather-tvtrx" Jan 21 19:16:16 crc kubenswrapper[4799]: I0121 19:16:16.380988 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4f422c5e-e37c-49ba-b445-fd8f178ee3e3-must-gather-output\") pod \"must-gather-tvtrx\" (UID: \"4f422c5e-e37c-49ba-b445-fd8f178ee3e3\") " pod="openshift-must-gather-g6g7f/must-gather-tvtrx" Jan 21 19:16:16 crc kubenswrapper[4799]: I0121 19:16:16.381386 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4f422c5e-e37c-49ba-b445-fd8f178ee3e3-must-gather-output\") pod \"must-gather-tvtrx\" (UID: \"4f422c5e-e37c-49ba-b445-fd8f178ee3e3\") " pod="openshift-must-gather-g6g7f/must-gather-tvtrx" Jan 21 19:16:16 crc kubenswrapper[4799]: I0121 19:16:16.399706 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz666\" (UniqueName: \"kubernetes.io/projected/4f422c5e-e37c-49ba-b445-fd8f178ee3e3-kube-api-access-wz666\") pod \"must-gather-tvtrx\" (UID: \"4f422c5e-e37c-49ba-b445-fd8f178ee3e3\") " pod="openshift-must-gather-g6g7f/must-gather-tvtrx" Jan 21 19:16:16 crc 
kubenswrapper[4799]: I0121 19:16:16.410595 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-g6g7f/must-gather-tvtrx" Jan 21 19:16:16 crc kubenswrapper[4799]: I0121 19:16:16.932832 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-g6g7f/must-gather-tvtrx"] Jan 21 19:16:17 crc kubenswrapper[4799]: I0121 19:16:17.311495 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-g6g7f/must-gather-tvtrx" event={"ID":"4f422c5e-e37c-49ba-b445-fd8f178ee3e3","Type":"ContainerStarted","Data":"07bf3e610a46c2af8214f20ad6ffdca5b0aad1ad1188a841b9d36354b530d1a5"} Jan 21 19:16:17 crc kubenswrapper[4799]: I0121 19:16:17.311535 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-g6g7f/must-gather-tvtrx" event={"ID":"4f422c5e-e37c-49ba-b445-fd8f178ee3e3","Type":"ContainerStarted","Data":"701f1b92b1a43a61cd8f770cd5b80351dee275154fb14fb48a8a7cbf930081ad"} Jan 21 19:16:18 crc kubenswrapper[4799]: I0121 19:16:18.325220 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-g6g7f/must-gather-tvtrx" event={"ID":"4f422c5e-e37c-49ba-b445-fd8f178ee3e3","Type":"ContainerStarted","Data":"37cd0687dcf0248ae11a5eac750b6b8ac6e878750a4e427bac1c61f81c0ece8f"} Jan 21 19:16:18 crc kubenswrapper[4799]: I0121 19:16:18.351745 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-g6g7f/must-gather-tvtrx" podStartSLOduration=2.3516806199999998 podStartE2EDuration="2.35168062s" podCreationTimestamp="2026-01-21 19:16:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 19:16:18.344887868 +0000 UTC m=+6204.971177901" watchObservedRunningTime="2026-01-21 19:16:18.35168062 +0000 UTC m=+6204.978004754" Jan 21 19:16:21 crc kubenswrapper[4799]: I0121 19:16:21.206464 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-g6g7f/crc-debug-9zrk6"] Jan 21 19:16:21 crc kubenswrapper[4799]: I0121 19:16:21.208205 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-g6g7f/crc-debug-9zrk6" Jan 21 19:16:21 crc kubenswrapper[4799]: I0121 19:16:21.292892 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9da45b2f-bff6-4c35-8504-94e7f7cf51ac-host\") pod \"crc-debug-9zrk6\" (UID: \"9da45b2f-bff6-4c35-8504-94e7f7cf51ac\") " pod="openshift-must-gather-g6g7f/crc-debug-9zrk6" Jan 21 19:16:21 crc kubenswrapper[4799]: I0121 19:16:21.293284 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6w4h\" (UniqueName: \"kubernetes.io/projected/9da45b2f-bff6-4c35-8504-94e7f7cf51ac-kube-api-access-p6w4h\") pod \"crc-debug-9zrk6\" (UID: \"9da45b2f-bff6-4c35-8504-94e7f7cf51ac\") " pod="openshift-must-gather-g6g7f/crc-debug-9zrk6" Jan 21 19:16:21 crc kubenswrapper[4799]: I0121 19:16:21.396310 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6w4h\" (UniqueName: \"kubernetes.io/projected/9da45b2f-bff6-4c35-8504-94e7f7cf51ac-kube-api-access-p6w4h\") pod \"crc-debug-9zrk6\" (UID: \"9da45b2f-bff6-4c35-8504-94e7f7cf51ac\") " pod="openshift-must-gather-g6g7f/crc-debug-9zrk6" Jan 21 19:16:21 crc kubenswrapper[4799]: I0121 19:16:21.397059 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9da45b2f-bff6-4c35-8504-94e7f7cf51ac-host\") pod \"crc-debug-9zrk6\" (UID: \"9da45b2f-bff6-4c35-8504-94e7f7cf51ac\") " pod="openshift-must-gather-g6g7f/crc-debug-9zrk6" Jan 21 19:16:21 crc kubenswrapper[4799]: I0121 19:16:21.397263 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9da45b2f-bff6-4c35-8504-94e7f7cf51ac-host\") pod \"crc-debug-9zrk6\" (UID: \"9da45b2f-bff6-4c35-8504-94e7f7cf51ac\") " pod="openshift-must-gather-g6g7f/crc-debug-9zrk6" Jan 21 19:16:21 crc kubenswrapper[4799]: I0121 19:16:21.433498 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6w4h\" (UniqueName: \"kubernetes.io/projected/9da45b2f-bff6-4c35-8504-94e7f7cf51ac-kube-api-access-p6w4h\") pod \"crc-debug-9zrk6\" (UID: \"9da45b2f-bff6-4c35-8504-94e7f7cf51ac\") " pod="openshift-must-gather-g6g7f/crc-debug-9zrk6" Jan 21 19:16:21 crc kubenswrapper[4799]: I0121 19:16:21.529945 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-g6g7f/crc-debug-9zrk6" Jan 21 19:16:21 crc kubenswrapper[4799]: W0121 19:16:21.559593 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9da45b2f_bff6_4c35_8504_94e7f7cf51ac.slice/crio-d7f1681c9ca775b4909d8c3d5400c0ac784bac2cba9c3b3068aca76e76825b0e WatchSource:0}: Error finding container d7f1681c9ca775b4909d8c3d5400c0ac784bac2cba9c3b3068aca76e76825b0e: Status 404 returned error can't find the container with id d7f1681c9ca775b4909d8c3d5400c0ac784bac2cba9c3b3068aca76e76825b0e Jan 21 19:16:22 crc kubenswrapper[4799]: I0121 19:16:22.378648 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-g6g7f/crc-debug-9zrk6" event={"ID":"9da45b2f-bff6-4c35-8504-94e7f7cf51ac","Type":"ContainerStarted","Data":"a277726e87ee9fb20268c24407df2ef1f780997b6f9b110afe6217ed6821a40a"} Jan 21 19:16:22 crc kubenswrapper[4799]: I0121 19:16:22.379291 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-g6g7f/crc-debug-9zrk6" event={"ID":"9da45b2f-bff6-4c35-8504-94e7f7cf51ac","Type":"ContainerStarted","Data":"d7f1681c9ca775b4909d8c3d5400c0ac784bac2cba9c3b3068aca76e76825b0e"} Jan 21 19:16:22 crc kubenswrapper[4799]: I0121 19:16:22.396249 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-g6g7f/crc-debug-9zrk6" podStartSLOduration=1.396225395 podStartE2EDuration="1.396225395s" podCreationTimestamp="2026-01-21 19:16:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-21 19:16:22.393771775 +0000 UTC m=+6209.020061798" watchObservedRunningTime="2026-01-21 19:16:22.396225395 +0000 UTC m=+6209.022515418" Jan 21 19:16:25 crc kubenswrapper[4799]: I0121 19:16:25.204871 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:16:25 crc kubenswrapper[4799]: E0121 19:16:25.205620 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:16:39 crc kubenswrapper[4799]: I0121 19:16:39.205198 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:16:39 crc kubenswrapper[4799]: E0121 19:16:39.206246 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:16:52 crc kubenswrapper[4799]: I0121 19:16:52.214218 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:16:52 crc kubenswrapper[4799]: E0121 19:16:52.219529 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:17:03 crc kubenswrapper[4799]: I0121 19:17:03.810343 4799 generic.go:334] "Generic (PLEG): container finished" podID="9da45b2f-bff6-4c35-8504-94e7f7cf51ac" containerID="a277726e87ee9fb20268c24407df2ef1f780997b6f9b110afe6217ed6821a40a" exitCode=0 Jan 21 19:17:03 crc kubenswrapper[4799]: I0121 19:17:03.810426 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-g6g7f/crc-debug-9zrk6" event={"ID":"9da45b2f-bff6-4c35-8504-94e7f7cf51ac","Type":"ContainerDied","Data":"a277726e87ee9fb20268c24407df2ef1f780997b6f9b110afe6217ed6821a40a"} Jan 21 19:17:04 crc kubenswrapper[4799]: I0121 19:17:04.946260 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-g6g7f/crc-debug-9zrk6" Jan 21 19:17:04 crc kubenswrapper[4799]: I0121 19:17:04.970070 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6w4h\" (UniqueName: \"kubernetes.io/projected/9da45b2f-bff6-4c35-8504-94e7f7cf51ac-kube-api-access-p6w4h\") pod \"9da45b2f-bff6-4c35-8504-94e7f7cf51ac\" (UID: \"9da45b2f-bff6-4c35-8504-94e7f7cf51ac\") " Jan 21 19:17:04 crc kubenswrapper[4799]: I0121 19:17:04.970308 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9da45b2f-bff6-4c35-8504-94e7f7cf51ac-host\") pod \"9da45b2f-bff6-4c35-8504-94e7f7cf51ac\" (UID: \"9da45b2f-bff6-4c35-8504-94e7f7cf51ac\") " Jan 21 19:17:04 crc kubenswrapper[4799]: I0121 19:17:04.970369 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9da45b2f-bff6-4c35-8504-94e7f7cf51ac-host" (OuterVolumeSpecName: "host") pod "9da45b2f-bff6-4c35-8504-94e7f7cf51ac" (UID: "9da45b2f-bff6-4c35-8504-94e7f7cf51ac"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 19:17:04 crc kubenswrapper[4799]: I0121 19:17:04.970923 4799 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9da45b2f-bff6-4c35-8504-94e7f7cf51ac-host\") on node \"crc\" DevicePath \"\"" Jan 21 19:17:04 crc kubenswrapper[4799]: I0121 19:17:04.976561 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9da45b2f-bff6-4c35-8504-94e7f7cf51ac-kube-api-access-p6w4h" (OuterVolumeSpecName: "kube-api-access-p6w4h") pod "9da45b2f-bff6-4c35-8504-94e7f7cf51ac" (UID: "9da45b2f-bff6-4c35-8504-94e7f7cf51ac"). InnerVolumeSpecName "kube-api-access-p6w4h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:17:04 crc kubenswrapper[4799]: I0121 19:17:04.984760 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-g6g7f/crc-debug-9zrk6"] Jan 21 19:17:04 crc kubenswrapper[4799]: I0121 19:17:04.999875 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-g6g7f/crc-debug-9zrk6"] Jan 21 19:17:05 crc kubenswrapper[4799]: I0121 19:17:05.073306 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6w4h\" (UniqueName: \"kubernetes.io/projected/9da45b2f-bff6-4c35-8504-94e7f7cf51ac-kube-api-access-p6w4h\") on node \"crc\" DevicePath \"\"" Jan 21 19:17:05 crc kubenswrapper[4799]: I0121 19:17:05.830842 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7f1681c9ca775b4909d8c3d5400c0ac784bac2cba9c3b3068aca76e76825b0e" Jan 21 19:17:05 crc kubenswrapper[4799]: I0121 19:17:05.830907 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-g6g7f/crc-debug-9zrk6" Jan 21 19:17:06 crc kubenswrapper[4799]: I0121 19:17:06.217935 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9da45b2f-bff6-4c35-8504-94e7f7cf51ac" path="/var/lib/kubelet/pods/9da45b2f-bff6-4c35-8504-94e7f7cf51ac/volumes" Jan 21 19:17:06 crc kubenswrapper[4799]: I0121 19:17:06.300880 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-g6g7f/crc-debug-zwx9k"] Jan 21 19:17:06 crc kubenswrapper[4799]: E0121 19:17:06.301357 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9da45b2f-bff6-4c35-8504-94e7f7cf51ac" containerName="container-00" Jan 21 19:17:06 crc kubenswrapper[4799]: I0121 19:17:06.301376 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="9da45b2f-bff6-4c35-8504-94e7f7cf51ac" containerName="container-00" Jan 21 19:17:06 crc kubenswrapper[4799]: I0121 19:17:06.301591 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="9da45b2f-bff6-4c35-8504-94e7f7cf51ac" containerName="container-00" Jan 21 19:17:06 crc kubenswrapper[4799]: I0121 19:17:06.303503 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-g6g7f/crc-debug-zwx9k" Jan 21 19:17:06 crc kubenswrapper[4799]: I0121 19:17:06.401343 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cczkv\" (UniqueName: \"kubernetes.io/projected/14ffda4e-6838-4b5a-aa46-15dd3ddee5b2-kube-api-access-cczkv\") pod \"crc-debug-zwx9k\" (UID: \"14ffda4e-6838-4b5a-aa46-15dd3ddee5b2\") " pod="openshift-must-gather-g6g7f/crc-debug-zwx9k" Jan 21 19:17:06 crc kubenswrapper[4799]: I0121 19:17:06.401726 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/14ffda4e-6838-4b5a-aa46-15dd3ddee5b2-host\") pod \"crc-debug-zwx9k\" (UID: \"14ffda4e-6838-4b5a-aa46-15dd3ddee5b2\") " pod="openshift-must-gather-g6g7f/crc-debug-zwx9k" Jan 21 19:17:06 crc kubenswrapper[4799]: I0121 19:17:06.503580 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/14ffda4e-6838-4b5a-aa46-15dd3ddee5b2-host\") pod \"crc-debug-zwx9k\" (UID: \"14ffda4e-6838-4b5a-aa46-15dd3ddee5b2\") " pod="openshift-must-gather-g6g7f/crc-debug-zwx9k" Jan 21 19:17:06 crc kubenswrapper[4799]: I0121 19:17:06.503775 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cczkv\" (UniqueName: \"kubernetes.io/projected/14ffda4e-6838-4b5a-aa46-15dd3ddee5b2-kube-api-access-cczkv\") pod \"crc-debug-zwx9k\" (UID: \"14ffda4e-6838-4b5a-aa46-15dd3ddee5b2\") " pod="openshift-must-gather-g6g7f/crc-debug-zwx9k" Jan 21 19:17:06 crc kubenswrapper[4799]: I0121 19:17:06.503777 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/14ffda4e-6838-4b5a-aa46-15dd3ddee5b2-host\") pod \"crc-debug-zwx9k\" (UID: \"14ffda4e-6838-4b5a-aa46-15dd3ddee5b2\") " pod="openshift-must-gather-g6g7f/crc-debug-zwx9k" Jan 21 19:17:06 crc kubenswrapper[4799]: I0121 19:17:06.521996 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cczkv\" (UniqueName: \"kubernetes.io/projected/14ffda4e-6838-4b5a-aa46-15dd3ddee5b2-kube-api-access-cczkv\") pod \"crc-debug-zwx9k\" (UID: \"14ffda4e-6838-4b5a-aa46-15dd3ddee5b2\") " pod="openshift-must-gather-g6g7f/crc-debug-zwx9k" Jan 21 19:17:06 crc kubenswrapper[4799]: I0121 19:17:06.627018 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-g6g7f/crc-debug-zwx9k" Jan 21 19:17:06 crc kubenswrapper[4799]: I0121 19:17:06.841811 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-g6g7f/crc-debug-zwx9k" event={"ID":"14ffda4e-6838-4b5a-aa46-15dd3ddee5b2","Type":"ContainerStarted","Data":"bd5e687238ddf2842a31071cb099f25476fa553acc946b48abd53b7dbf9a6b25"} Jan 21 19:17:07 crc kubenswrapper[4799]: I0121 19:17:07.205537 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:17:07 crc kubenswrapper[4799]: E0121 19:17:07.205910 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:17:07 crc kubenswrapper[4799]: I0121 19:17:07.852593 4799 generic.go:334] "Generic (PLEG): container finished" podID="14ffda4e-6838-4b5a-aa46-15dd3ddee5b2" containerID="6074dd1f09c3a0aac68048aa7fb66c7bd3bb8f0101f30c080cb2bc11b3134243" exitCode=0 Jan 21 19:17:07 crc kubenswrapper[4799]: I0121 19:17:07.852741 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-g6g7f/crc-debug-zwx9k" event={"ID":"14ffda4e-6838-4b5a-aa46-15dd3ddee5b2","Type":"ContainerDied","Data":"6074dd1f09c3a0aac68048aa7fb66c7bd3bb8f0101f30c080cb2bc11b3134243"} Jan 21 19:17:08 crc kubenswrapper[4799]: I0121 19:17:08.971306 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-g6g7f/crc-debug-zwx9k" Jan 21 19:17:09 crc kubenswrapper[4799]: I0121 19:17:09.058834 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cczkv\" (UniqueName: \"kubernetes.io/projected/14ffda4e-6838-4b5a-aa46-15dd3ddee5b2-kube-api-access-cczkv\") pod \"14ffda4e-6838-4b5a-aa46-15dd3ddee5b2\" (UID: \"14ffda4e-6838-4b5a-aa46-15dd3ddee5b2\") " Jan 21 19:17:09 crc kubenswrapper[4799]: I0121 19:17:09.058911 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/14ffda4e-6838-4b5a-aa46-15dd3ddee5b2-host\") pod \"14ffda4e-6838-4b5a-aa46-15dd3ddee5b2\" (UID: \"14ffda4e-6838-4b5a-aa46-15dd3ddee5b2\") " Jan 21 19:17:09 crc kubenswrapper[4799]: I0121 19:17:09.059315 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/14ffda4e-6838-4b5a-aa46-15dd3ddee5b2-host" (OuterVolumeSpecName: "host") pod "14ffda4e-6838-4b5a-aa46-15dd3ddee5b2" (UID: "14ffda4e-6838-4b5a-aa46-15dd3ddee5b2"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 19:17:09 crc kubenswrapper[4799]: I0121 19:17:09.059751 4799 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/14ffda4e-6838-4b5a-aa46-15dd3ddee5b2-host\") on node \"crc\" DevicePath \"\"" Jan 21 19:17:09 crc kubenswrapper[4799]: I0121 19:17:09.072598 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14ffda4e-6838-4b5a-aa46-15dd3ddee5b2-kube-api-access-cczkv" (OuterVolumeSpecName: "kube-api-access-cczkv") pod "14ffda4e-6838-4b5a-aa46-15dd3ddee5b2" (UID: "14ffda4e-6838-4b5a-aa46-15dd3ddee5b2"). InnerVolumeSpecName "kube-api-access-cczkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:17:09 crc kubenswrapper[4799]: I0121 19:17:09.164279 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cczkv\" (UniqueName: \"kubernetes.io/projected/14ffda4e-6838-4b5a-aa46-15dd3ddee5b2-kube-api-access-cczkv\") on node \"crc\" DevicePath \"\"" Jan 21 19:17:09 crc kubenswrapper[4799]: I0121 19:17:09.685625 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-g6g7f/crc-debug-zwx9k"] Jan 21 19:17:09 crc kubenswrapper[4799]: I0121 19:17:09.698484 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-g6g7f/crc-debug-zwx9k"] Jan 21 19:17:09 crc kubenswrapper[4799]: I0121 19:17:09.874505 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd5e687238ddf2842a31071cb099f25476fa553acc946b48abd53b7dbf9a6b25" Jan 21 19:17:09 crc kubenswrapper[4799]: I0121 19:17:09.874592 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-g6g7f/crc-debug-zwx9k" Jan 21 19:17:10 crc kubenswrapper[4799]: I0121 19:17:10.216824 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14ffda4e-6838-4b5a-aa46-15dd3ddee5b2" path="/var/lib/kubelet/pods/14ffda4e-6838-4b5a-aa46-15dd3ddee5b2/volumes" Jan 21 19:17:10 crc kubenswrapper[4799]: I0121 19:17:10.879022 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-g6g7f/crc-debug-qn9mh"] Jan 21 19:17:10 crc kubenswrapper[4799]: E0121 19:17:10.879539 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14ffda4e-6838-4b5a-aa46-15dd3ddee5b2" containerName="container-00" Jan 21 19:17:10 crc kubenswrapper[4799]: I0121 19:17:10.879562 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="14ffda4e-6838-4b5a-aa46-15dd3ddee5b2" containerName="container-00" Jan 21 19:17:10 crc kubenswrapper[4799]: I0121 19:17:10.879773 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="14ffda4e-6838-4b5a-aa46-15dd3ddee5b2" containerName="container-00" Jan 21 19:17:10 crc kubenswrapper[4799]: I0121 19:17:10.895973 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-g6g7f/crc-debug-qn9mh" Jan 21 19:17:11 crc kubenswrapper[4799]: I0121 19:17:11.006735 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6ztc\" (UniqueName: \"kubernetes.io/projected/d2274b78-bd8a-4bc2-b8a5-beaae85b8be2-kube-api-access-s6ztc\") pod \"crc-debug-qn9mh\" (UID: \"d2274b78-bd8a-4bc2-b8a5-beaae85b8be2\") " pod="openshift-must-gather-g6g7f/crc-debug-qn9mh" Jan 21 19:17:11 crc kubenswrapper[4799]: I0121 19:17:11.006819 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d2274b78-bd8a-4bc2-b8a5-beaae85b8be2-host\") pod \"crc-debug-qn9mh\" (UID: \"d2274b78-bd8a-4bc2-b8a5-beaae85b8be2\") " pod="openshift-must-gather-g6g7f/crc-debug-qn9mh" Jan 21 19:17:11 crc kubenswrapper[4799]: I0121 19:17:11.109583 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6ztc\" (UniqueName: \"kubernetes.io/projected/d2274b78-bd8a-4bc2-b8a5-beaae85b8be2-kube-api-access-s6ztc\") pod \"crc-debug-qn9mh\" (UID: \"d2274b78-bd8a-4bc2-b8a5-beaae85b8be2\") " pod="openshift-must-gather-g6g7f/crc-debug-qn9mh" Jan 21 19:17:11 crc kubenswrapper[4799]: I0121 19:17:11.109672 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d2274b78-bd8a-4bc2-b8a5-beaae85b8be2-host\") pod \"crc-debug-qn9mh\" (UID: \"d2274b78-bd8a-4bc2-b8a5-beaae85b8be2\") " pod="openshift-must-gather-g6g7f/crc-debug-qn9mh" Jan 21 19:17:11 crc kubenswrapper[4799]: I0121 19:17:11.109838 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d2274b78-bd8a-4bc2-b8a5-beaae85b8be2-host\") pod \"crc-debug-qn9mh\" (UID: \"d2274b78-bd8a-4bc2-b8a5-beaae85b8be2\") " pod="openshift-must-gather-g6g7f/crc-debug-qn9mh" Jan 21 19:17:11 crc kubenswrapper[4799]: I0121 19:17:11.133385 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6ztc\" (UniqueName: \"kubernetes.io/projected/d2274b78-bd8a-4bc2-b8a5-beaae85b8be2-kube-api-access-s6ztc\") pod \"crc-debug-qn9mh\" (UID: \"d2274b78-bd8a-4bc2-b8a5-beaae85b8be2\") " pod="openshift-must-gather-g6g7f/crc-debug-qn9mh" Jan 21 19:17:11 crc kubenswrapper[4799]: I0121 19:17:11.219633 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-g6g7f/crc-debug-qn9mh" Jan 21 19:17:11 crc kubenswrapper[4799]: W0121 19:17:11.258448 4799 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2274b78_bd8a_4bc2_b8a5_beaae85b8be2.slice/crio-25b07374c3581d36c601f7b3b534f557f3485ee4919cca08455aead1b3e73f96 WatchSource:0}: Error finding container 25b07374c3581d36c601f7b3b534f557f3485ee4919cca08455aead1b3e73f96: Status 404 returned error can't find the container with id 25b07374c3581d36c601f7b3b534f557f3485ee4919cca08455aead1b3e73f96 Jan 21 19:17:11 crc kubenswrapper[4799]: I0121 19:17:11.907717 4799 generic.go:334] "Generic (PLEG): container finished" podID="d2274b78-bd8a-4bc2-b8a5-beaae85b8be2" containerID="0d4aa814e24726faaa166ab3a9c8cf40815f2ad364a383140888a8eda82a97c0" exitCode=0 Jan 21 19:17:11 crc kubenswrapper[4799]: I0121 19:17:11.907761 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-g6g7f/crc-debug-qn9mh" event={"ID":"d2274b78-bd8a-4bc2-b8a5-beaae85b8be2","Type":"ContainerDied","Data":"0d4aa814e24726faaa166ab3a9c8cf40815f2ad364a383140888a8eda82a97c0"} Jan 21 19:17:11 crc kubenswrapper[4799]: I0121 19:17:11.907787 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-g6g7f/crc-debug-qn9mh" event={"ID":"d2274b78-bd8a-4bc2-b8a5-beaae85b8be2","Type":"ContainerStarted","Data":"25b07374c3581d36c601f7b3b534f557f3485ee4919cca08455aead1b3e73f96"} Jan 21 19:17:11 crc kubenswrapper[4799]: I0121 19:17:11.954004 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-g6g7f/crc-debug-qn9mh"] Jan 21 19:17:11 crc kubenswrapper[4799]: I0121 19:17:11.964377 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-g6g7f/crc-debug-qn9mh"] Jan 21 19:17:13 crc kubenswrapper[4799]: I0121 19:17:13.036881 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-g6g7f/crc-debug-qn9mh" Jan 21 19:17:13 crc kubenswrapper[4799]: I0121 19:17:13.050641 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6ztc\" (UniqueName: \"kubernetes.io/projected/d2274b78-bd8a-4bc2-b8a5-beaae85b8be2-kube-api-access-s6ztc\") pod \"d2274b78-bd8a-4bc2-b8a5-beaae85b8be2\" (UID: \"d2274b78-bd8a-4bc2-b8a5-beaae85b8be2\") " Jan 21 19:17:13 crc kubenswrapper[4799]: I0121 19:17:13.050780 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d2274b78-bd8a-4bc2-b8a5-beaae85b8be2-host\") pod \"d2274b78-bd8a-4bc2-b8a5-beaae85b8be2\" (UID: \"d2274b78-bd8a-4bc2-b8a5-beaae85b8be2\") " Jan 21 19:17:13 crc kubenswrapper[4799]: I0121 19:17:13.050895 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d2274b78-bd8a-4bc2-b8a5-beaae85b8be2-host" (OuterVolumeSpecName: "host") pod "d2274b78-bd8a-4bc2-b8a5-beaae85b8be2" (UID: "d2274b78-bd8a-4bc2-b8a5-beaae85b8be2"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 21 19:17:13 crc kubenswrapper[4799]: I0121 19:17:13.051457 4799 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d2274b78-bd8a-4bc2-b8a5-beaae85b8be2-host\") on node \"crc\" DevicePath \"\"" Jan 21 19:17:13 crc kubenswrapper[4799]: I0121 19:17:13.056887 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2274b78-bd8a-4bc2-b8a5-beaae85b8be2-kube-api-access-s6ztc" (OuterVolumeSpecName: "kube-api-access-s6ztc") pod "d2274b78-bd8a-4bc2-b8a5-beaae85b8be2" (UID: "d2274b78-bd8a-4bc2-b8a5-beaae85b8be2"). InnerVolumeSpecName "kube-api-access-s6ztc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:17:13 crc kubenswrapper[4799]: I0121 19:17:13.153727 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6ztc\" (UniqueName: \"kubernetes.io/projected/d2274b78-bd8a-4bc2-b8a5-beaae85b8be2-kube-api-access-s6ztc\") on node \"crc\" DevicePath \"\"" Jan 21 19:17:13 crc kubenswrapper[4799]: I0121 19:17:13.929794 4799 scope.go:117] "RemoveContainer" containerID="0d4aa814e24726faaa166ab3a9c8cf40815f2ad364a383140888a8eda82a97c0" Jan 21 19:17:13 crc kubenswrapper[4799]: I0121 19:17:13.930061 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-g6g7f/crc-debug-qn9mh" Jan 21 19:17:14 crc kubenswrapper[4799]: I0121 19:17:14.220553 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2274b78-bd8a-4bc2-b8a5-beaae85b8be2" path="/var/lib/kubelet/pods/d2274b78-bd8a-4bc2-b8a5-beaae85b8be2/volumes" Jan 21 19:17:20 crc kubenswrapper[4799]: I0121 19:17:20.204896 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:17:20 crc kubenswrapper[4799]: E0121 19:17:20.205679 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:17:33 crc kubenswrapper[4799]: I0121 19:17:33.204904 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:17:33 crc kubenswrapper[4799]: E0121 19:17:33.205686 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:17:33 crc kubenswrapper[4799]: I0121 19:17:33.633887 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/swift-proxy-749b6794b5-k8rw7" podUID="f7542699-9beb-4966-b1e4-b3c3cb9b42ff" containerName="proxy-server" probeResult="failure" output="HTTP probe failed with statuscode: 502" Jan 21 19:17:48 crc kubenswrapper[4799]: I0121 19:17:48.210459 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:17:48 crc kubenswrapper[4799]: E0121 
19:17:48.211306 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:17:57 crc kubenswrapper[4799]: I0121 19:17:57.417673 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-cfcccc69b-6zwk4_f4c9e3bf-79dd-49d5-af90-db5a6087f0f3/barbican-api/0.log" Jan 21 19:17:57 crc kubenswrapper[4799]: I0121 19:17:57.589044 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-cfcccc69b-6zwk4_f4c9e3bf-79dd-49d5-af90-db5a6087f0f3/barbican-api-log/0.log" Jan 21 19:17:57 crc kubenswrapper[4799]: I0121 19:17:57.642588 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6b9d59f6f8-vl6sn_9bb30a38-ea0d-4580-9a41-326f00b5c149/barbican-keystone-listener/0.log" Jan 21 19:17:57 crc kubenswrapper[4799]: I0121 19:17:57.806466 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6b9d59f6f8-vl6sn_9bb30a38-ea0d-4580-9a41-326f00b5c149/barbican-keystone-listener-log/0.log" Jan 21 19:17:57 crc kubenswrapper[4799]: I0121 19:17:57.843436 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7f979ff5f7-qvdts_c7dc5147-addd-46d9-b5b3-3f328c0a5a94/barbican-worker/0.log" Jan 21 19:17:58 crc kubenswrapper[4799]: I0121 19:17:58.029826 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7f979ff5f7-qvdts_c7dc5147-addd-46d9-b5b3-3f328c0a5a94/barbican-worker-log/0.log" Jan 21 19:17:58 crc kubenswrapper[4799]: I0121 19:17:58.095345 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-p2dbp_7f2d9e34-479a-44ae-b64e-55baf5645dfc/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:17:58 crc kubenswrapper[4799]: I0121 19:17:58.567921 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_04163938-d340-4731-82c4-e01a636b7df2/ceilometer-notification-agent/0.log" Jan 21 19:17:58 crc kubenswrapper[4799]: I0121 19:17:58.619878 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_04163938-d340-4731-82c4-e01a636b7df2/proxy-httpd/0.log" Jan 21 19:17:58 crc kubenswrapper[4799]: I0121 19:17:58.663211 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_04163938-d340-4731-82c4-e01a636b7df2/ceilometer-central-agent/0.log" Jan 21 19:17:58 crc kubenswrapper[4799]: I0121 19:17:58.724204 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_04163938-d340-4731-82c4-e01a636b7df2/sg-core/0.log" Jan 21 19:17:58 crc kubenswrapper[4799]: I0121 19:17:58.951217 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e8bf54a9-bf9c-47d1-b391-b73c0055c935/cinder-api-log/0.log" Jan 21 19:17:59 crc kubenswrapper[4799]: I0121 19:17:59.353579 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_55c0bc68-cae8-4eee-9caf-37f8a26c76f9/probe/0.log" Jan 21 19:17:59 crc kubenswrapper[4799]: I0121 19:17:59.496740 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_cinder-backup-0_55c0bc68-cae8-4eee-9caf-37f8a26c76f9/cinder-backup/0.log" Jan 21 19:17:59 crc kubenswrapper[4799]: I0121 19:17:59.599569 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e8bf54a9-bf9c-47d1-b391-b73c0055c935/cinder-api/0.log" Jan 21 19:17:59 crc kubenswrapper[4799]: I0121 19:17:59.636966 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_eb283f72-f347-49c6-9813-6fece9fc70da/cinder-scheduler/0.log" Jan 21 19:17:59 crc kubenswrapper[4799]: I0121 19:17:59.692537 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_eb283f72-f347-49c6-9813-6fece9fc70da/probe/0.log" Jan 21 19:17:59 crc kubenswrapper[4799]: I0121 19:17:59.938180 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-0_0658bd3c-d1f4-486d-957a-38f4eb9ccc10/probe/0.log" Jan 21 19:18:00 crc kubenswrapper[4799]: I0121 19:18:00.100733 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-0_0658bd3c-d1f4-486d-957a-38f4eb9ccc10/cinder-volume/0.log" Jan 21 19:18:00 crc kubenswrapper[4799]: I0121 19:18:00.249958 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-2-0_82503b8b-9773-4e14-9703-663675725aa9/probe/0.log" Jan 21 19:18:00 crc kubenswrapper[4799]: I0121 19:18:00.327708 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-nfs-2-0_82503b8b-9773-4e14-9703-663675725aa9/cinder-volume/0.log" Jan 21 19:18:00 crc kubenswrapper[4799]: I0121 19:18:00.383538 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-bz2f6_8cea4a3f-5c0a-4e15-a62d-64798a8f818e/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:18:00 crc kubenswrapper[4799]: I0121 19:18:00.618772 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-h9r9p_d427281b-c110-468f-b056-78a91049bcd4/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:18:00 crc kubenswrapper[4799]: I0121 19:18:00.636927 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-59899cb9c-whmhs_41441182-ee7b-46da-9f86-975ad9b22777/init/0.log" Jan 21 19:18:00 crc kubenswrapper[4799]: I0121 19:18:00.867795 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-59899cb9c-whmhs_41441182-ee7b-46da-9f86-975ad9b22777/init/0.log" Jan 21 19:18:00 crc kubenswrapper[4799]: I0121 19:18:00.973685 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-8nd4d_0bf0a460-fd95-41ad-b7a3-8f3fb4500db7/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:18:01 crc kubenswrapper[4799]: I0121 19:18:01.092874 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-59899cb9c-whmhs_41441182-ee7b-46da-9f86-975ad9b22777/dnsmasq-dns/0.log" Jan 21 19:18:01 crc kubenswrapper[4799]: I0121 19:18:01.243425 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_486557f2-139f-4297-8c6c-9dc9ed6f5cdc/glance-httpd/0.log" Jan 21 19:18:01 crc kubenswrapper[4799]: I0121 19:18:01.270061 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_486557f2-139f-4297-8c6c-9dc9ed6f5cdc/glance-log/0.log" Jan 21 
19:18:01 crc kubenswrapper[4799]: I0121 19:18:01.450487 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_5152eb20-55f2-4c0c-9a8b-6b1e9043abf9/glance-httpd/0.log" Jan 21 19:18:01 crc kubenswrapper[4799]: I0121 19:18:01.522538 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_5152eb20-55f2-4c0c-9a8b-6b1e9043abf9/glance-log/0.log" Jan 21 19:18:01 crc kubenswrapper[4799]: I0121 19:18:01.745870 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-phxp5_7e239b6d-2469-4d29-b1e1-72b1d7916ada/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:18:01 crc kubenswrapper[4799]: I0121 19:18:01.765113 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-585ff694b6-5fph4_b8391139-71cc-48bb-af31-242cebaea8de/horizon/0.log" Jan 21 19:18:02 crc kubenswrapper[4799]: I0121 19:18:02.006193 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-qknml_0d9cee91-78fe-4816-a3dc-db90e98bcddd/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:18:02 crc kubenswrapper[4799]: I0121 19:18:02.231672 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29483641-hhgjf_e4a59182-8711-4086-b753-a527b88f464b/keystone-cron/0.log" Jan 21 19:18:02 crc kubenswrapper[4799]: I0121 19:18:02.380638 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29483701-6vvv2_7295fe64-03cb-4766-934e-cbd3eace9c00/keystone-cron/0.log" Jan 21 19:18:02 crc kubenswrapper[4799]: I0121 19:18:02.578155 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_dab8cf8d-7956-4cfb-a107-0e15661fc5f7/kube-state-metrics/0.log" Jan 21 19:18:02 crc kubenswrapper[4799]: I0121 19:18:02.686212 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-86999674c5-gpgq6_4cc3ff02-feee-4b55-a057-99380b99a10e/keystone-api/0.log" Jan 21 19:18:02 crc kubenswrapper[4799]: I0121 19:18:02.696760 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-585ff694b6-5fph4_b8391139-71cc-48bb-af31-242cebaea8de/horizon-log/0.log" Jan 21 19:18:02 crc kubenswrapper[4799]: I0121 19:18:02.811480 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-t9kcx_34ff08b0-f833-4c42-90a7-68af92ba7ce8/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:18:03 crc kubenswrapper[4799]: I0121 19:18:03.205593 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:18:03 crc kubenswrapper[4799]: E0121 19:18:03.205873 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:18:03 crc kubenswrapper[4799]: I0121 19:18:03.533354 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7644966657-gcssj_95553e27-38f3-4a0d-a382-d87410ca7ec3/neutron-httpd/0.log" Jan 21 19:18:03 crc kubenswrapper[4799]: I0121 
19:18:03.629255 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-nrb9s_f5e75302-b14f-4281-93e3-a40bff4bcafa/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:18:03 crc kubenswrapper[4799]: I0121 19:18:03.706117 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7644966657-gcssj_95553e27-38f3-4a0d-a382-d87410ca7ec3/neutron-api/0.log" Jan 21 19:18:04 crc kubenswrapper[4799]: I0121 19:18:04.320300 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_d21651e9-1ffb-472f-8c41-652621413b50/nova-cell0-conductor-conductor/0.log" Jan 21 19:18:04 crc kubenswrapper[4799]: I0121 19:18:04.762654 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_ff6ad04e-2495-4af0-b908-ee65bb277ebc/nova-cell1-conductor-conductor/0.log" Jan 21 19:18:05 crc kubenswrapper[4799]: I0121 19:18:05.026701 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_1fb95d61-c96e-4f03-88cb-b22b8fc6c5b9/nova-cell1-novncproxy-novncproxy/0.log" Jan 21 19:18:05 crc kubenswrapper[4799]: I0121 19:18:05.332053 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_d954dc98-6a6a-49b8-976c-b668619adcff/nova-api-log/0.log" Jan 21 19:18:05 crc kubenswrapper[4799]: I0121 19:18:05.387076 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-bs6xc_9f3a320d-5663-4a3d-adb0-fa58906a7a91/nova-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:18:05 crc kubenswrapper[4799]: I0121 19:18:05.648506 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd/nova-metadata-log/0.log" Jan 21 19:18:06 crc kubenswrapper[4799]: I0121 19:18:06.126337 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_d954dc98-6a6a-49b8-976c-b668619adcff/nova-api-api/0.log" Jan 21 19:18:06 crc kubenswrapper[4799]: I0121 19:18:06.164178 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e95ea0b2-ade1-4aaa-ad67-b85ebde84afa/mysql-bootstrap/0.log" Jan 21 19:18:06 crc kubenswrapper[4799]: I0121 19:18:06.338203 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_113d1aee-0a9f-47dd-9a33-ab951cab8535/nova-scheduler-scheduler/0.log" Jan 21 19:18:06 crc kubenswrapper[4799]: I0121 19:18:06.371106 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e95ea0b2-ade1-4aaa-ad67-b85ebde84afa/mysql-bootstrap/0.log" Jan 21 19:18:06 crc kubenswrapper[4799]: I0121 19:18:06.429403 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e95ea0b2-ade1-4aaa-ad67-b85ebde84afa/galera/0.log" Jan 21 19:18:06 crc kubenswrapper[4799]: I0121 19:18:06.618473 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_04f9c729-36bb-4aa5-9060-af5b0666b196/mysql-bootstrap/0.log" Jan 21 19:18:06 crc kubenswrapper[4799]: I0121 19:18:06.856861 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_04f9c729-36bb-4aa5-9060-af5b0666b196/mysql-bootstrap/0.log" Jan 21 19:18:06 crc kubenswrapper[4799]: I0121 19:18:06.873258 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-galera-0_04f9c729-36bb-4aa5-9060-af5b0666b196/galera/0.log" Jan 21 19:18:07 crc kubenswrapper[4799]: I0121 19:18:07.066177 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_ff8dc291-a487-43d0-a494-9496737703ef/openstackclient/0.log" Jan 21 19:18:07 crc kubenswrapper[4799]: I0121 19:18:07.151818 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-68wt5_05213e52-1f99-42a4-b882-4514760063c7/ovn-controller/0.log" Jan 21 19:18:07 crc kubenswrapper[4799]: I0121 19:18:07.371574 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-bxtjr_b117b068-807b-4c10-8c30-46648892f87f/openstack-network-exporter/0.log" Jan 21 19:18:07 crc kubenswrapper[4799]: I0121 19:18:07.835191 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5dwpd_0d199dae-6bd1-48c4-8a95-25ffd4555e29/ovsdb-server-init/0.log" Jan 21 19:18:08 crc kubenswrapper[4799]: I0121 19:18:08.099086 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5dwpd_0d199dae-6bd1-48c4-8a95-25ffd4555e29/ovsdb-server-init/0.log" Jan 21 19:18:08 crc kubenswrapper[4799]: I0121 19:18:08.179703 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5dwpd_0d199dae-6bd1-48c4-8a95-25ffd4555e29/ovsdb-server/0.log" Jan 21 19:18:08 crc kubenswrapper[4799]: I0121 19:18:08.353083 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d97d7ffc-f5b9-4807-b498-1ffdbe02b9cd/nova-metadata-metadata/0.log" Jan 21 19:18:08 crc kubenswrapper[4799]: I0121 19:18:08.453262 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-mrgkt_a1a6a3df-3a95-4614-92f5-25fd585431b5/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:18:08 crc kubenswrapper[4799]: I0121 19:18:08.500194 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5dwpd_0d199dae-6bd1-48c4-8a95-25ffd4555e29/ovs-vswitchd/0.log" Jan 21 19:18:08 crc kubenswrapper[4799]: I0121 19:18:08.675453 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e983d617-7cd1-416a-8955-c3d755e4a5b0/openstack-network-exporter/0.log" Jan 21 19:18:08 crc kubenswrapper[4799]: I0121 19:18:08.781693 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e983d617-7cd1-416a-8955-c3d755e4a5b0/ovn-northd/0.log" Jan 21 19:18:08 crc kubenswrapper[4799]: I0121 19:18:08.853454 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_58187703-2c52-4f99-8d9a-65306c90c5ed/openstack-network-exporter/0.log" Jan 21 19:18:09 crc kubenswrapper[4799]: I0121 19:18:09.001331 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_acea8227-6d95-4c5f-bba7-8e954701de28/openstack-network-exporter/0.log" Jan 21 19:18:09 crc kubenswrapper[4799]: I0121 19:18:09.006827 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_58187703-2c52-4f99-8d9a-65306c90c5ed/ovsdbserver-nb/0.log" Jan 21 19:18:09 crc kubenswrapper[4799]: I0121 19:18:09.126555 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_acea8227-6d95-4c5f-bba7-8e954701de28/ovsdbserver-sb/0.log" Jan 21 19:18:09 crc kubenswrapper[4799]: I0121 19:18:09.546275 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_prometheus-metric-storage-0_5e364896-23a0-4e1e-9e15-7f637ee5326c/init-config-reloader/0.log" Jan 21 19:18:09 crc kubenswrapper[4799]: I0121 19:18:09.621118 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-55f8df6d54-cffcw_d03c23b6-50c8-4a4b-b2ea-53c4a3010790/placement-api/0.log" Jan 21 19:18:09 crc kubenswrapper[4799]: I0121 19:18:09.655057 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-55f8df6d54-cffcw_d03c23b6-50c8-4a4b-b2ea-53c4a3010790/placement-log/0.log" Jan 21 19:18:09 crc kubenswrapper[4799]: I0121 19:18:09.829965 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5e364896-23a0-4e1e-9e15-7f637ee5326c/config-reloader/0.log" Jan 21 19:18:09 crc kubenswrapper[4799]: I0121 19:18:09.868683 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5e364896-23a0-4e1e-9e15-7f637ee5326c/init-config-reloader/0.log" Jan 21 19:18:09 crc kubenswrapper[4799]: I0121 19:18:09.874974 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5e364896-23a0-4e1e-9e15-7f637ee5326c/prometheus/0.log" Jan 21 19:18:09 crc kubenswrapper[4799]: I0121 19:18:09.942774 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_5e364896-23a0-4e1e-9e15-7f637ee5326c/thanos-sidecar/0.log" Jan 21 19:18:10 crc kubenswrapper[4799]: I0121 19:18:10.143814 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_88d1e166-bb2f-473e-a955-e79c6251a580/setup-container/0.log" Jan 21 19:18:10 crc kubenswrapper[4799]: I0121 19:18:10.427527 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_88d1e166-bb2f-473e-a955-e79c6251a580/rabbitmq/0.log" Jan 21 19:18:10 crc kubenswrapper[4799]: I0121 19:18:10.441500 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_88d1e166-bb2f-473e-a955-e79c6251a580/setup-container/0.log" Jan 21 19:18:10 crc kubenswrapper[4799]: I0121 19:18:10.499989 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_63677f61-4283-417a-bcf7-303840452589/setup-container/0.log" Jan 21 19:18:10 crc kubenswrapper[4799]: I0121 19:18:10.732964 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_63677f61-4283-417a-bcf7-303840452589/setup-container/0.log" Jan 21 19:18:10 crc kubenswrapper[4799]: I0121 19:18:10.775654 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_63677f61-4283-417a-bcf7-303840452589/rabbitmq/0.log" Jan 21 19:18:10 crc kubenswrapper[4799]: I0121 19:18:10.838046 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_135b6a22-006b-4270-a559-39fc323570b2/setup-container/0.log" Jan 21 19:18:10 crc kubenswrapper[4799]: I0121 19:18:10.985518 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_135b6a22-006b-4270-a559-39fc323570b2/setup-container/0.log" Jan 21 19:18:11 crc kubenswrapper[4799]: I0121 19:18:11.058393 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_135b6a22-006b-4270-a559-39fc323570b2/rabbitmq/0.log" Jan 21 19:18:11 crc kubenswrapper[4799]: I0121 19:18:11.116027 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-cfhnf_947392cf-f31e-4cc3-85b9-3fcf86b289ef/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:18:11 crc kubenswrapper[4799]: I0121 19:18:11.396759 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-f8tvb_7d25ce2c-aa9c-4cbe-bca0-2b1f7cd0ec7d/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:18:11 crc kubenswrapper[4799]: I0121 19:18:11.473118 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-xs8kd_509437ec-6d22-4843-accb-db316692f6c9/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:18:11 crc kubenswrapper[4799]: I0121 19:18:11.731280 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-2sm5r_1ea44035-c1c7-45f2-921a-bf2d91a9a7d8/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:18:11 crc kubenswrapper[4799]: I0121 19:18:11.770686 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-d92xl_a94b72a4-75d5-427c-86ab-014f1f9ee0a2/ssh-known-hosts-edpm-deployment/0.log" Jan 21 19:18:12 crc kubenswrapper[4799]: I0121 19:18:12.218303 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-749b6794b5-k8rw7_f7542699-9beb-4966-b1e4-b3c3cb9b42ff/proxy-server/0.log" Jan 21 19:18:12 crc kubenswrapper[4799]: I0121 19:18:12.512063 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-749b6794b5-k8rw7_f7542699-9beb-4966-b1e4-b3c3cb9b42ff/proxy-httpd/0.log" Jan 21 19:18:12 crc kubenswrapper[4799]: I0121 19:18:12.523913 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-72p7x_a9b76963-d66a-43b7-9f1a-ef2a18ef6d02/swift-ring-rebalance/0.log" Jan 21 19:18:12 crc kubenswrapper[4799]: I0121 19:18:12.594562 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/account-auditor/0.log" Jan 21 19:18:12 crc kubenswrapper[4799]: I0121 19:18:12.756527 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/account-reaper/0.log" Jan 21 19:18:12 crc kubenswrapper[4799]: I0121 19:18:12.823492 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/account-server/0.log" Jan 21 19:18:12 crc kubenswrapper[4799]: I0121 19:18:12.833294 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/account-replicator/0.log" Jan 21 19:18:12 crc kubenswrapper[4799]: I0121 19:18:12.875389 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/container-auditor/0.log" Jan 21 19:18:13 crc kubenswrapper[4799]: I0121 19:18:13.065491 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/container-server/0.log" Jan 21 19:18:13 crc kubenswrapper[4799]: I0121 19:18:13.066149 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/container-replicator/0.log" Jan 21 19:18:13 crc kubenswrapper[4799]: I0121 19:18:13.096418 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/container-updater/0.log" Jan 21 19:18:13 crc kubenswrapper[4799]: I0121 19:18:13.228517 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/object-auditor/0.log" Jan 21 19:18:13 crc kubenswrapper[4799]: I0121 19:18:13.327060 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/object-expirer/0.log" Jan 21 19:18:13 crc kubenswrapper[4799]: I0121 19:18:13.359143 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/object-server/0.log" Jan 21 19:18:13 crc kubenswrapper[4799]: I0121 19:18:13.396825 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/object-replicator/0.log" Jan 21 19:18:13 crc kubenswrapper[4799]: I0121 19:18:13.487261 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/object-updater/0.log" Jan 21 19:18:13 crc kubenswrapper[4799]: I0121 19:18:13.557351 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/rsync/0.log" Jan 21 19:18:13 crc kubenswrapper[4799]: I0121 19:18:13.722012 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_771ea47a-76eb-434d-ac1f-cf6048f08237/swift-recon-cron/0.log" Jan 21 19:18:13 crc kubenswrapper[4799]: I0121 19:18:13.815248 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-lg6qn_b5f5c54c-325e-4640-8cb5-5f8ac5c91234/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:18:14 crc kubenswrapper[4799]: I0121 19:18:14.025690 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_384bc0b0-0caa-45e3-b892-155def4ed881/tempest-tests-tempest-tests-runner/0.log" Jan 21 19:18:14 crc kubenswrapper[4799]: I0121 19:18:14.142349 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_254949e9-614e-419a-ba47-42bf8850d001/test-operator-logs-container/0.log" Jan 21 19:18:14 crc kubenswrapper[4799]: I0121 19:18:14.291528 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-cxws2_b2bf4e9d-98ab-403e-8275-ac50c1b2c108/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Jan 21 19:18:15 crc kubenswrapper[4799]: I0121 19:18:15.171874 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-applier-0_c37f7c3a-832c-4991-9fe0-6e923befb599/watcher-applier/0.log" Jan 21 19:18:15 crc kubenswrapper[4799]: I0121 19:18:15.879426 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_40368171-ea6d-4ab3-a1de-33204529aab4/watcher-api-log/0.log" Jan 21 19:18:16 crc kubenswrapper[4799]: I0121 19:18:16.205458 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:18:16 crc kubenswrapper[4799]: E0121 19:18:16.205811 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:18:18 crc kubenswrapper[4799]: I0121 19:18:18.830153 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_30f6abc1-fcdc-4901-9e88-3b6c5fd2a223/watcher-decision-engine/0.log" Jan 21 19:18:19 crc kubenswrapper[4799]: I0121 19:18:19.709963 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_c75e85a7-0869-4fe4-ba35-d51f6107027c/memcached/0.log" Jan 21 19:18:20 crc kubenswrapper[4799]: I0121 19:18:20.406263 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_40368171-ea6d-4ab3-a1de-33204529aab4/watcher-api/0.log" Jan 21 19:18:29 crc kubenswrapper[4799]: I0121 19:18:29.207333 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:18:29 crc kubenswrapper[4799]: I0121 19:18:29.828010 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"c40775b8a431be9bcc384aad4a591d1cfadc9d5fcac27070146e3f779cf50aab"} Jan 21 19:18:45 crc kubenswrapper[4799]: I0121 19:18:45.479490 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns_ba033264-742d-42f8-b688-5d0f8a853360/util/0.log" Jan 21 19:18:45 crc kubenswrapper[4799]: I0121 19:18:45.729161 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns_ba033264-742d-42f8-b688-5d0f8a853360/pull/0.log" Jan 21 19:18:45 crc kubenswrapper[4799]: I0121 19:18:45.742481 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns_ba033264-742d-42f8-b688-5d0f8a853360/pull/0.log" Jan 21 19:18:45 crc kubenswrapper[4799]: I0121 19:18:45.789740 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns_ba033264-742d-42f8-b688-5d0f8a853360/util/0.log" Jan 21 19:18:46 crc kubenswrapper[4799]: I0121 19:18:46.056819 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns_ba033264-742d-42f8-b688-5d0f8a853360/extract/0.log" Jan 21 19:18:46 crc kubenswrapper[4799]: I0121 19:18:46.086321 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns_ba033264-742d-42f8-b688-5d0f8a853360/pull/0.log" Jan 21 19:18:46 crc kubenswrapper[4799]: I0121 19:18:46.169071 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0b164c49815bafb4b2c7942fb14fdc69876a4f876002193d74d81456a8w4bns_ba033264-742d-42f8-b688-5d0f8a853360/util/0.log" Jan 21 19:18:46 crc kubenswrapper[4799]: I0121 19:18:46.401035 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7ddb5c749-xgqnz_99a0338e-5d7f-47cd-a30f-8c57ab921724/manager/0.log" Jan 21 19:18:46 crc kubenswrapper[4799]: I0121 19:18:46.448661 4799 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-9b68f5989-6gbp4_aa887ea8-0375-49c1-b802-9b3c8468fa87/manager/0.log" Jan 21 19:18:46 crc kubenswrapper[4799]: I0121 19:18:46.689832 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-9f958b845-2qd6s_0e8e19fd-c988-48ce-9150-1b46974bd86e/manager/0.log" Jan 21 19:18:46 crc kubenswrapper[4799]: I0121 19:18:46.691777 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-c6994669c-msz6d_10ffe97a-fa49-481f-9e79-55627ab24692/manager/0.log" Jan 21 19:18:47 crc kubenswrapper[4799]: I0121 19:18:47.144932 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-594c8c9d5d-tslfv_70fddebf-b616-47bd-a139-d2a4999624dd/manager/0.log" Jan 21 19:18:47 crc kubenswrapper[4799]: I0121 19:18:47.185423 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-ff7f6_ac9f205a-3d30-4ca3-b253-32c441466211/manager/0.log" Jan 21 19:18:47 crc kubenswrapper[4799]: I0121 19:18:47.478350 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-78757b4889-75r9k_3a0e1cc6-500f-4493-8a18-0eeea206a4f7/manager/0.log" Jan 21 19:18:47 crc kubenswrapper[4799]: I0121 19:18:47.660941 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-77c48c7859-ffgnr_7654ac1c-746c-46e6-b276-e9f6a839a187/manager/0.log" Jan 21 19:18:47 crc kubenswrapper[4799]: I0121 19:18:47.712345 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-767fdc4f47-rw2zn_223724ab-b9ee-4f55-b1ab-bf730a6314f9/manager/0.log" Jan 21 19:18:47 crc kubenswrapper[4799]: I0121 19:18:47.755078 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-864f6b75bf-4ttvx_f90c4327-642d-4efd-90d3-7d3b83dbcfc9/manager/0.log" Jan 21 19:18:47 crc kubenswrapper[4799]: I0121 19:18:47.945096 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-c87fff755-bz4tf_b35d565f-4d9f-437a-add9-8ef40d891e99/manager/0.log" Jan 21 19:18:48 crc kubenswrapper[4799]: I0121 19:18:48.017496 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-cb4666565-rwglk_ed464d3c-bdd7-4b19-a332-402ddeccb65b/manager/0.log" Jan 21 19:18:48 crc kubenswrapper[4799]: I0121 19:18:48.194689 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-65849867d6-v552f_11e4c63f-cdc3-4d50-a4e7-03386747ca86/manager/0.log" Jan 21 19:18:48 crc kubenswrapper[4799]: I0121 19:18:48.259275 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7fc9b76cf6-9wst2_b3ba3b28-4c9a-48f6-a914-5a125e4ef7f1/manager/0.log" Jan 21 19:18:48 crc kubenswrapper[4799]: I0121 19:18:48.361459 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6b68b8b854nht6t_4d7fd266-ebc9-46f2-9355-4dac2699822c/manager/0.log" Jan 21 19:18:48 crc kubenswrapper[4799]: I0121 19:18:48.592334 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-operator-controller-init-6664d49b67-ncnqt_35ba5cb8-8f17-4e8d-bd30-28912c4fbe0d/operator/0.log" Jan 21 19:18:48 crc kubenswrapper[4799]: I0121 19:18:48.822229 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-kwk6b_4ef61afc-f214-4ffd-875e-b8c8dfb2426e/registry-server/0.log" Jan 21 19:18:49 crc kubenswrapper[4799]: I0121 19:18:49.089637 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-55db956ddc-7nxnh_92a8c35c-6ef9-4453-9233-df8579764cd2/manager/0.log" Jan 21 19:18:49 crc kubenswrapper[4799]: I0121 19:18:49.276590 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-686df47fcb-2b24b_e898f43a-2487-48f8-9615-f02fdbd9eb30/manager/0.log" Jan 21 19:18:49 crc kubenswrapper[4799]: I0121 19:18:49.498972 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-hnnwf_3e5a60e4-5801-4273-a08a-20907c8bed09/operator/0.log" Jan 21 19:18:49 crc kubenswrapper[4799]: I0121 19:18:49.733740 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-85dd56d4cc-dqwbg_d9e14e78-dbbf-4941-9abb-5e2f8eb1fc2f/manager/0.log" Jan 21 19:18:49 crc kubenswrapper[4799]: I0121 19:18:49.889084 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7ffc46955b-5t4q2_f0bcc23c-7399-4a1f-a91b-f643eaee6e60/manager/0.log" Jan 21 19:18:50 crc kubenswrapper[4799]: I0121 19:18:50.027954 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5f8f495fcf-n2lq2_a2536e9b-8292-474d-ae06-00e4721120b3/manager/0.log" Jan 21 19:18:50 crc kubenswrapper[4799]: I0121 19:18:50.082267 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-7cd8bc9dbb-v4wk5_44bcdd0c-5b20-4387-a105-c8f3fb661a6f/manager/0.log" Jan 21 19:18:50 crc kubenswrapper[4799]: I0121 19:18:50.200892 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-65f5896948-jrzsz_822a89c1-0086-49f1-9bee-6ac87a2af52a/manager/0.log" Jan 21 19:19:11 crc kubenswrapper[4799]: I0121 19:19:11.711398 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-whkcj"] Jan 21 19:19:11 crc kubenswrapper[4799]: E0121 19:19:11.712486 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2274b78-bd8a-4bc2-b8a5-beaae85b8be2" containerName="container-00" Jan 21 19:19:11 crc kubenswrapper[4799]: I0121 19:19:11.712506 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2274b78-bd8a-4bc2-b8a5-beaae85b8be2" containerName="container-00" Jan 21 19:19:11 crc kubenswrapper[4799]: I0121 19:19:11.712751 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2274b78-bd8a-4bc2-b8a5-beaae85b8be2" containerName="container-00" Jan 21 19:19:11 crc kubenswrapper[4799]: I0121 19:19:11.714246 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:11 crc kubenswrapper[4799]: I0121 19:19:11.728838 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-whkcj"] Jan 21 19:19:11 crc kubenswrapper[4799]: I0121 19:19:11.836141 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-nzghk_dd4d0095-1e20-4fcf-937f-1351374f36c6/control-plane-machine-set-operator/0.log" Jan 21 19:19:11 crc kubenswrapper[4799]: I0121 19:19:11.864767 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-catalog-content\") pod \"community-operators-whkcj\" (UID: \"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb\") " pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:11 crc kubenswrapper[4799]: I0121 19:19:11.864830 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cqvw\" (UniqueName: \"kubernetes.io/projected/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-kube-api-access-5cqvw\") pod \"community-operators-whkcj\" (UID: \"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb\") " pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:11 crc kubenswrapper[4799]: I0121 19:19:11.864874 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-utilities\") pod \"community-operators-whkcj\" (UID: \"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb\") " pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:11 crc kubenswrapper[4799]: I0121 19:19:11.967752 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-catalog-content\") pod \"community-operators-whkcj\" (UID: \"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb\") " pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:11 crc kubenswrapper[4799]: I0121 19:19:11.967818 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cqvw\" (UniqueName: \"kubernetes.io/projected/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-kube-api-access-5cqvw\") pod \"community-operators-whkcj\" (UID: \"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb\") " pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:11 crc kubenswrapper[4799]: I0121 19:19:11.967851 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-utilities\") pod \"community-operators-whkcj\" (UID: \"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb\") " pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:11 crc kubenswrapper[4799]: I0121 19:19:11.968313 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-catalog-content\") pod \"community-operators-whkcj\" (UID: \"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb\") " pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:11 crc kubenswrapper[4799]: I0121 19:19:11.968356 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-utilities\") pod \"community-operators-whkcj\" (UID: \"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb\") " pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:11 crc kubenswrapper[4799]: I0121 19:19:11.988158 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cqvw\" (UniqueName: \"kubernetes.io/projected/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-kube-api-access-5cqvw\") pod \"community-operators-whkcj\" (UID: \"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb\") " pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:12 crc kubenswrapper[4799]: I0121 19:19:12.037735 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:12 crc kubenswrapper[4799]: I0121 19:19:12.165729 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-rk6k5_38e1abaa-9da0-4924-a6b5-ee9617cf304d/kube-rbac-proxy/0.log" Jan 21 19:19:12 crc kubenswrapper[4799]: I0121 19:19:12.214527 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-rk6k5_38e1abaa-9da0-4924-a6b5-ee9617cf304d/machine-api-operator/0.log" Jan 21 19:19:12 crc kubenswrapper[4799]: I0121 19:19:12.697135 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-whkcj"] Jan 21 19:19:12 crc kubenswrapper[4799]: I0121 19:19:12.710581 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nv86b"] Jan 21 19:19:12 crc kubenswrapper[4799]: I0121 19:19:12.712723 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:12 crc kubenswrapper[4799]: I0121 19:19:12.741188 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nv86b"] Jan 21 19:19:12 crc kubenswrapper[4799]: I0121 19:19:12.786432 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c05a837c-3d12-486b-994f-303ddfe81b4f-catalog-content\") pod \"certified-operators-nv86b\" (UID: \"c05a837c-3d12-486b-994f-303ddfe81b4f\") " pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:12 crc kubenswrapper[4799]: I0121 19:19:12.786493 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c05a837c-3d12-486b-994f-303ddfe81b4f-utilities\") pod \"certified-operators-nv86b\" (UID: \"c05a837c-3d12-486b-994f-303ddfe81b4f\") " pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:12 crc kubenswrapper[4799]: I0121 19:19:12.786551 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzjxg\" (UniqueName: \"kubernetes.io/projected/c05a837c-3d12-486b-994f-303ddfe81b4f-kube-api-access-tzjxg\") pod \"certified-operators-nv86b\" (UID: \"c05a837c-3d12-486b-994f-303ddfe81b4f\") " pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:12 crc kubenswrapper[4799]: I0121 19:19:12.888237 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c05a837c-3d12-486b-994f-303ddfe81b4f-catalog-content\") pod 
\"certified-operators-nv86b\" (UID: \"c05a837c-3d12-486b-994f-303ddfe81b4f\") " pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:12 crc kubenswrapper[4799]: I0121 19:19:12.888283 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c05a837c-3d12-486b-994f-303ddfe81b4f-utilities\") pod \"certified-operators-nv86b\" (UID: \"c05a837c-3d12-486b-994f-303ddfe81b4f\") " pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:12 crc kubenswrapper[4799]: I0121 19:19:12.888331 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzjxg\" (UniqueName: \"kubernetes.io/projected/c05a837c-3d12-486b-994f-303ddfe81b4f-kube-api-access-tzjxg\") pod \"certified-operators-nv86b\" (UID: \"c05a837c-3d12-486b-994f-303ddfe81b4f\") " pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:12 crc kubenswrapper[4799]: I0121 19:19:12.888866 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c05a837c-3d12-486b-994f-303ddfe81b4f-utilities\") pod \"certified-operators-nv86b\" (UID: \"c05a837c-3d12-486b-994f-303ddfe81b4f\") " pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:12 crc kubenswrapper[4799]: I0121 19:19:12.889119 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c05a837c-3d12-486b-994f-303ddfe81b4f-catalog-content\") pod \"certified-operators-nv86b\" (UID: \"c05a837c-3d12-486b-994f-303ddfe81b4f\") " pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:12 crc kubenswrapper[4799]: I0121 19:19:12.911224 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzjxg\" (UniqueName: \"kubernetes.io/projected/c05a837c-3d12-486b-994f-303ddfe81b4f-kube-api-access-tzjxg\") pod \"certified-operators-nv86b\" (UID: \"c05a837c-3d12-486b-994f-303ddfe81b4f\") " pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:13 crc kubenswrapper[4799]: I0121 19:19:13.076198 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:13 crc kubenswrapper[4799]: I0121 19:19:13.276076 4799 generic.go:334] "Generic (PLEG): container finished" podID="f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb" containerID="230ac6882d72918b080a7b3c47d10da33fe17d6bd8834c99def572ccfa98b24a" exitCode=0 Jan 21 19:19:13 crc kubenswrapper[4799]: I0121 19:19:13.276173 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-whkcj" event={"ID":"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb","Type":"ContainerDied","Data":"230ac6882d72918b080a7b3c47d10da33fe17d6bd8834c99def572ccfa98b24a"} Jan 21 19:19:13 crc kubenswrapper[4799]: I0121 19:19:13.276215 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-whkcj" event={"ID":"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb","Type":"ContainerStarted","Data":"a006dc8170ba347cb9a1f60c4a8708652182bfd8144886211d1ba5f7a1754085"} Jan 21 19:19:13 crc kubenswrapper[4799]: I0121 19:19:13.280540 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 19:19:13 crc kubenswrapper[4799]: I0121 19:19:13.648238 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nv86b"] Jan 21 19:19:14 crc kubenswrapper[4799]: I0121 19:19:14.286091 4799 generic.go:334] "Generic (PLEG): container finished" podID="c05a837c-3d12-486b-994f-303ddfe81b4f" containerID="8bac53a15a6164f3cdae6ec21901470d0f2b3e3add96cc2b46ea3f6ddf119b3b" exitCode=0 Jan 21 19:19:14 crc kubenswrapper[4799]: I0121 19:19:14.286470 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nv86b" event={"ID":"c05a837c-3d12-486b-994f-303ddfe81b4f","Type":"ContainerDied","Data":"8bac53a15a6164f3cdae6ec21901470d0f2b3e3add96cc2b46ea3f6ddf119b3b"} Jan 21 19:19:14 crc kubenswrapper[4799]: I0121 19:19:14.286507 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nv86b" event={"ID":"c05a837c-3d12-486b-994f-303ddfe81b4f","Type":"ContainerStarted","Data":"dfe3ea3c607c69460546e3edcf2163c632f608b07c73c515649139deea8e912d"} Jan 21 19:19:14 crc kubenswrapper[4799]: I0121 19:19:14.291244 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-whkcj" event={"ID":"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb","Type":"ContainerStarted","Data":"288096d1d9a734b51046762990d1f7e9183fd3010d74598805323457f9de1087"} Jan 21 19:19:15 crc kubenswrapper[4799]: I0121 19:19:15.302386 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nv86b" event={"ID":"c05a837c-3d12-486b-994f-303ddfe81b4f","Type":"ContainerStarted","Data":"bc572081f75739fef7318591cfb7ac2bdfb35f1e1faf0dc5263f6bd353abbdaf"} Jan 21 19:19:15 crc kubenswrapper[4799]: I0121 19:19:15.304919 4799 generic.go:334] "Generic (PLEG): container finished" podID="f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb" containerID="288096d1d9a734b51046762990d1f7e9183fd3010d74598805323457f9de1087" exitCode=0 Jan 21 19:19:15 crc kubenswrapper[4799]: I0121 19:19:15.304948 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-whkcj" event={"ID":"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb","Type":"ContainerDied","Data":"288096d1d9a734b51046762990d1f7e9183fd3010d74598805323457f9de1087"} Jan 21 19:19:16 crc kubenswrapper[4799]: I0121 19:19:16.317407 4799 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-marketplace/community-operators-whkcj" event={"ID":"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb","Type":"ContainerStarted","Data":"a3cc8304ec9d23fdd2c1fde031bd7dbbaa81ad96b89bfcc199ce98313554674b"} Jan 21 19:19:16 crc kubenswrapper[4799]: I0121 19:19:16.346294 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-whkcj" podStartSLOduration=2.6780755320000003 podStartE2EDuration="5.34627554s" podCreationTimestamp="2026-01-21 19:19:11 +0000 UTC" firstStartedPulling="2026-01-21 19:19:13.28030771 +0000 UTC m=+6379.906597733" lastFinishedPulling="2026-01-21 19:19:15.948507718 +0000 UTC m=+6382.574797741" observedRunningTime="2026-01-21 19:19:16.338305605 +0000 UTC m=+6382.964595648" watchObservedRunningTime="2026-01-21 19:19:16.34627554 +0000 UTC m=+6382.972565553" Jan 21 19:19:17 crc kubenswrapper[4799]: I0121 19:19:17.330884 4799 generic.go:334] "Generic (PLEG): container finished" podID="c05a837c-3d12-486b-994f-303ddfe81b4f" containerID="bc572081f75739fef7318591cfb7ac2bdfb35f1e1faf0dc5263f6bd353abbdaf" exitCode=0 Jan 21 19:19:17 crc kubenswrapper[4799]: I0121 19:19:17.330959 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nv86b" event={"ID":"c05a837c-3d12-486b-994f-303ddfe81b4f","Type":"ContainerDied","Data":"bc572081f75739fef7318591cfb7ac2bdfb35f1e1faf0dc5263f6bd353abbdaf"} Jan 21 19:19:18 crc kubenswrapper[4799]: I0121 19:19:18.341590 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nv86b" event={"ID":"c05a837c-3d12-486b-994f-303ddfe81b4f","Type":"ContainerStarted","Data":"e7a0e309cf45a770610353995fe6b9a965ef4fc84c6a5ba100184c0f49448c1f"} Jan 21 19:19:18 crc kubenswrapper[4799]: I0121 19:19:18.365158 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nv86b" podStartSLOduration=2.810537247 podStartE2EDuration="6.365121796s" podCreationTimestamp="2026-01-21 19:19:12 +0000 UTC" firstStartedPulling="2026-01-21 19:19:14.288438811 +0000 UTC m=+6380.914728834" lastFinishedPulling="2026-01-21 19:19:17.84302336 +0000 UTC m=+6384.469313383" observedRunningTime="2026-01-21 19:19:18.359470466 +0000 UTC m=+6384.985760499" watchObservedRunningTime="2026-01-21 19:19:18.365121796 +0000 UTC m=+6384.991411839" Jan 21 19:19:22 crc kubenswrapper[4799]: I0121 19:19:22.038465 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:22 crc kubenswrapper[4799]: I0121 19:19:22.038938 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:22 crc kubenswrapper[4799]: I0121 19:19:22.093093 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:22 crc kubenswrapper[4799]: I0121 19:19:22.449968 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:23 crc kubenswrapper[4799]: I0121 19:19:23.077953 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:23 crc kubenswrapper[4799]: I0121 19:19:23.078300 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:23 crc 
kubenswrapper[4799]: I0121 19:19:23.135271 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:23 crc kubenswrapper[4799]: I0121 19:19:23.454317 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:24 crc kubenswrapper[4799]: I0121 19:19:24.498531 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-whkcj"] Jan 21 19:19:24 crc kubenswrapper[4799]: I0121 19:19:24.498790 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-whkcj" podUID="f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb" containerName="registry-server" containerID="cri-o://a3cc8304ec9d23fdd2c1fde031bd7dbbaa81ad96b89bfcc199ce98313554674b" gracePeriod=2 Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.078910 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.178649 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-catalog-content\") pod \"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb\" (UID: \"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb\") " Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.179098 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-utilities\") pod \"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb\" (UID: \"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb\") " Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.179305 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cqvw\" (UniqueName: \"kubernetes.io/projected/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-kube-api-access-5cqvw\") pod \"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb\" (UID: \"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb\") " Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.179821 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-utilities" (OuterVolumeSpecName: "utilities") pod "f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb" (UID: "f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.180183 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.185408 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-kube-api-access-5cqvw" (OuterVolumeSpecName: "kube-api-access-5cqvw") pod "f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb" (UID: "f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb"). InnerVolumeSpecName "kube-api-access-5cqvw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.232859 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb" (UID: "f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.282835 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.282868 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cqvw\" (UniqueName: \"kubernetes.io/projected/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb-kube-api-access-5cqvw\") on node \"crc\" DevicePath \"\"" Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.432415 4799 generic.go:334] "Generic (PLEG): container finished" podID="f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb" containerID="a3cc8304ec9d23fdd2c1fde031bd7dbbaa81ad96b89bfcc199ce98313554674b" exitCode=0 Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.432472 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-whkcj" event={"ID":"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb","Type":"ContainerDied","Data":"a3cc8304ec9d23fdd2c1fde031bd7dbbaa81ad96b89bfcc199ce98313554674b"} Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.432506 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-whkcj" event={"ID":"f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb","Type":"ContainerDied","Data":"a006dc8170ba347cb9a1f60c4a8708652182bfd8144886211d1ba5f7a1754085"} Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.432530 4799 scope.go:117] "RemoveContainer" containerID="a3cc8304ec9d23fdd2c1fde031bd7dbbaa81ad96b89bfcc199ce98313554674b" Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.432747 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-whkcj" Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.470550 4799 scope.go:117] "RemoveContainer" containerID="288096d1d9a734b51046762990d1f7e9183fd3010d74598805323457f9de1087" Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.476346 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-whkcj"] Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.486908 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-whkcj"] Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.501239 4799 scope.go:117] "RemoveContainer" containerID="230ac6882d72918b080a7b3c47d10da33fe17d6bd8834c99def572ccfa98b24a" Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.541219 4799 scope.go:117] "RemoveContainer" containerID="a3cc8304ec9d23fdd2c1fde031bd7dbbaa81ad96b89bfcc199ce98313554674b" Jan 21 19:19:25 crc kubenswrapper[4799]: E0121 19:19:25.543319 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3cc8304ec9d23fdd2c1fde031bd7dbbaa81ad96b89bfcc199ce98313554674b\": container with ID starting with a3cc8304ec9d23fdd2c1fde031bd7dbbaa81ad96b89bfcc199ce98313554674b not found: ID does not exist" containerID="a3cc8304ec9d23fdd2c1fde031bd7dbbaa81ad96b89bfcc199ce98313554674b" Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.543381 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3cc8304ec9d23fdd2c1fde031bd7dbbaa81ad96b89bfcc199ce98313554674b"} err="failed to get container status \"a3cc8304ec9d23fdd2c1fde031bd7dbbaa81ad96b89bfcc199ce98313554674b\": rpc error: code = NotFound desc = could not find container \"a3cc8304ec9d23fdd2c1fde031bd7dbbaa81ad96b89bfcc199ce98313554674b\": container with ID starting with a3cc8304ec9d23fdd2c1fde031bd7dbbaa81ad96b89bfcc199ce98313554674b not found: ID does not exist" Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.543420 4799 scope.go:117] "RemoveContainer" containerID="288096d1d9a734b51046762990d1f7e9183fd3010d74598805323457f9de1087" Jan 21 19:19:25 crc kubenswrapper[4799]: E0121 19:19:25.544002 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"288096d1d9a734b51046762990d1f7e9183fd3010d74598805323457f9de1087\": container with ID starting with 288096d1d9a734b51046762990d1f7e9183fd3010d74598805323457f9de1087 not found: ID does not exist" containerID="288096d1d9a734b51046762990d1f7e9183fd3010d74598805323457f9de1087" Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.544057 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"288096d1d9a734b51046762990d1f7e9183fd3010d74598805323457f9de1087"} err="failed to get container status \"288096d1d9a734b51046762990d1f7e9183fd3010d74598805323457f9de1087\": rpc error: code = NotFound desc = could not find container \"288096d1d9a734b51046762990d1f7e9183fd3010d74598805323457f9de1087\": container with ID starting with 288096d1d9a734b51046762990d1f7e9183fd3010d74598805323457f9de1087 not found: ID does not exist" Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.544088 4799 scope.go:117] "RemoveContainer" containerID="230ac6882d72918b080a7b3c47d10da33fe17d6bd8834c99def572ccfa98b24a" Jan 21 19:19:25 crc kubenswrapper[4799]: E0121 19:19:25.544924 4799 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"230ac6882d72918b080a7b3c47d10da33fe17d6bd8834c99def572ccfa98b24a\": container with ID starting with 230ac6882d72918b080a7b3c47d10da33fe17d6bd8834c99def572ccfa98b24a not found: ID does not exist" containerID="230ac6882d72918b080a7b3c47d10da33fe17d6bd8834c99def572ccfa98b24a" Jan 21 19:19:25 crc kubenswrapper[4799]: I0121 19:19:25.544954 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"230ac6882d72918b080a7b3c47d10da33fe17d6bd8834c99def572ccfa98b24a"} err="failed to get container status \"230ac6882d72918b080a7b3c47d10da33fe17d6bd8834c99def572ccfa98b24a\": rpc error: code = NotFound desc = could not find container \"230ac6882d72918b080a7b3c47d10da33fe17d6bd8834c99def572ccfa98b24a\": container with ID starting with 230ac6882d72918b080a7b3c47d10da33fe17d6bd8834c99def572ccfa98b24a not found: ID does not exist" Jan 21 19:19:26 crc kubenswrapper[4799]: I0121 19:19:26.217842 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb" path="/var/lib/kubelet/pods/f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb/volumes" Jan 21 19:19:26 crc kubenswrapper[4799]: I0121 19:19:26.904225 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nv86b"] Jan 21 19:19:26 crc kubenswrapper[4799]: I0121 19:19:26.904540 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nv86b" podUID="c05a837c-3d12-486b-994f-303ddfe81b4f" containerName="registry-server" containerID="cri-o://e7a0e309cf45a770610353995fe6b9a965ef4fc84c6a5ba100184c0f49448c1f" gracePeriod=2 Jan 21 19:19:27 crc kubenswrapper[4799]: I0121 19:19:27.459754 4799 generic.go:334] "Generic (PLEG): container finished" podID="c05a837c-3d12-486b-994f-303ddfe81b4f" containerID="e7a0e309cf45a770610353995fe6b9a965ef4fc84c6a5ba100184c0f49448c1f" exitCode=0 Jan 21 19:19:27 crc kubenswrapper[4799]: I0121 19:19:27.459816 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nv86b" event={"ID":"c05a837c-3d12-486b-994f-303ddfe81b4f","Type":"ContainerDied","Data":"e7a0e309cf45a770610353995fe6b9a965ef4fc84c6a5ba100184c0f49448c1f"} Jan 21 19:19:27 crc kubenswrapper[4799]: I0121 19:19:27.543705 4799 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:27 crc kubenswrapper[4799]: I0121 19:19:27.641825 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c05a837c-3d12-486b-994f-303ddfe81b4f-utilities\") pod \"c05a837c-3d12-486b-994f-303ddfe81b4f\" (UID: \"c05a837c-3d12-486b-994f-303ddfe81b4f\") " Jan 21 19:19:27 crc kubenswrapper[4799]: I0121 19:19:27.641940 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzjxg\" (UniqueName: \"kubernetes.io/projected/c05a837c-3d12-486b-994f-303ddfe81b4f-kube-api-access-tzjxg\") pod \"c05a837c-3d12-486b-994f-303ddfe81b4f\" (UID: \"c05a837c-3d12-486b-994f-303ddfe81b4f\") " Jan 21 19:19:27 crc kubenswrapper[4799]: I0121 19:19:27.642044 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c05a837c-3d12-486b-994f-303ddfe81b4f-catalog-content\") pod \"c05a837c-3d12-486b-994f-303ddfe81b4f\" (UID: \"c05a837c-3d12-486b-994f-303ddfe81b4f\") " Jan 21 19:19:27 crc kubenswrapper[4799]: I0121 19:19:27.642669 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c05a837c-3d12-486b-994f-303ddfe81b4f-utilities" (OuterVolumeSpecName: "utilities") pod "c05a837c-3d12-486b-994f-303ddfe81b4f" (UID: "c05a837c-3d12-486b-994f-303ddfe81b4f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:19:27 crc kubenswrapper[4799]: I0121 19:19:27.648546 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c05a837c-3d12-486b-994f-303ddfe81b4f-kube-api-access-tzjxg" (OuterVolumeSpecName: "kube-api-access-tzjxg") pod "c05a837c-3d12-486b-994f-303ddfe81b4f" (UID: "c05a837c-3d12-486b-994f-303ddfe81b4f"). InnerVolumeSpecName "kube-api-access-tzjxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:19:27 crc kubenswrapper[4799]: I0121 19:19:27.689243 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c05a837c-3d12-486b-994f-303ddfe81b4f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c05a837c-3d12-486b-994f-303ddfe81b4f" (UID: "c05a837c-3d12-486b-994f-303ddfe81b4f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:19:27 crc kubenswrapper[4799]: I0121 19:19:27.744745 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c05a837c-3d12-486b-994f-303ddfe81b4f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 19:19:27 crc kubenswrapper[4799]: I0121 19:19:27.745094 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c05a837c-3d12-486b-994f-303ddfe81b4f-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 19:19:27 crc kubenswrapper[4799]: I0121 19:19:27.745195 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzjxg\" (UniqueName: \"kubernetes.io/projected/c05a837c-3d12-486b-994f-303ddfe81b4f-kube-api-access-tzjxg\") on node \"crc\" DevicePath \"\"" Jan 21 19:19:28 crc kubenswrapper[4799]: I0121 19:19:28.315223 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-ct8cl_48e93168-c733-4355-b1b2-5cfd895ed094/cert-manager-controller/0.log" Jan 21 19:19:28 crc kubenswrapper[4799]: I0121 19:19:28.509725 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nv86b" event={"ID":"c05a837c-3d12-486b-994f-303ddfe81b4f","Type":"ContainerDied","Data":"dfe3ea3c607c69460546e3edcf2163c632f608b07c73c515649139deea8e912d"} Jan 21 19:19:28 crc kubenswrapper[4799]: I0121 19:19:28.509778 4799 scope.go:117] "RemoveContainer" containerID="e7a0e309cf45a770610353995fe6b9a965ef4fc84c6a5ba100184c0f49448c1f" Jan 21 19:19:28 crc kubenswrapper[4799]: I0121 19:19:28.509943 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nv86b" Jan 21 19:19:28 crc kubenswrapper[4799]: I0121 19:19:28.537595 4799 scope.go:117] "RemoveContainer" containerID="bc572081f75739fef7318591cfb7ac2bdfb35f1e1faf0dc5263f6bd353abbdaf" Jan 21 19:19:28 crc kubenswrapper[4799]: I0121 19:19:28.544712 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nv86b"] Jan 21 19:19:28 crc kubenswrapper[4799]: I0121 19:19:28.557177 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nv86b"] Jan 21 19:19:28 crc kubenswrapper[4799]: I0121 19:19:28.561602 4799 scope.go:117] "RemoveContainer" containerID="8bac53a15a6164f3cdae6ec21901470d0f2b3e3add96cc2b46ea3f6ddf119b3b" Jan 21 19:19:28 crc kubenswrapper[4799]: I0121 19:19:28.600216 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-xt8bd_de26c870-5c19-414b-9222-c0cd1419550d/cert-manager-webhook/0.log" Jan 21 19:19:28 crc kubenswrapper[4799]: I0121 19:19:28.711758 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-pmjm6_b64d5a15-e3a7-45be-a22f-730946419bd4/cert-manager-cainjector/0.log" Jan 21 19:19:30 crc kubenswrapper[4799]: I0121 19:19:30.216973 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c05a837c-3d12-486b-994f-303ddfe81b4f" path="/var/lib/kubelet/pods/c05a837c-3d12-486b-994f-303ddfe81b4f/volumes" Jan 21 19:19:41 crc kubenswrapper[4799]: I0121 19:19:41.831306 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-84q5f_777bc3b4-a1aa-42ec-8639-f08d14be32b4/nmstate-console-plugin/0.log" Jan 21 19:19:41 crc kubenswrapper[4799]: I0121 
19:19:41.991867 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-xcfs7_f1d83e65-f17f-4802-a0be-536f18cfe6e2/nmstate-handler/0.log" Jan 21 19:19:42 crc kubenswrapper[4799]: I0121 19:19:42.145974 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-qcsdq_eeedecf6-13c6-4102-a889-a3cec17f120c/kube-rbac-proxy/0.log" Jan 21 19:19:42 crc kubenswrapper[4799]: I0121 19:19:42.255652 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-qcsdq_eeedecf6-13c6-4102-a889-a3cec17f120c/nmstate-metrics/0.log" Jan 21 19:19:42 crc kubenswrapper[4799]: I0121 19:19:42.304724 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-njm8b_fd2c58b3-6fc3-4391-8397-c2b1078e48b8/nmstate-operator/0.log" Jan 21 19:19:42 crc kubenswrapper[4799]: I0121 19:19:42.472162 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-xkl27_9445e980-390c-4759-9dcb-aa2a906f773a/nmstate-webhook/0.log" Jan 21 19:19:56 crc kubenswrapper[4799]: I0121 19:19:56.232507 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-58w9k_d743d591-b616-4e57-8395-ef3565083899/prometheus-operator/0.log" Jan 21 19:19:56 crc kubenswrapper[4799]: I0121 19:19:56.402361 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t_77f855af-53b1-4152-bbff-c818ffa1e32e/prometheus-operator-admission-webhook/0.log" Jan 21 19:19:56 crc kubenswrapper[4799]: I0121 19:19:56.405390 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf_44710bca-2659-43a9-9454-e12123e0c965/prometheus-operator-admission-webhook/0.log" Jan 21 19:19:56 crc kubenswrapper[4799]: I0121 19:19:56.608210 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-xrwhp_d38deaee-a893-47a5-b3d5-c1ea392a894b/operator/0.log" Jan 21 19:19:56 crc kubenswrapper[4799]: I0121 19:19:56.642275 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-w5tlc_3202bb55-0262-452a-9cfe-93088a43c767/perses-operator/0.log" Jan 21 19:20:10 crc kubenswrapper[4799]: I0121 19:20:10.445862 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-sbtsn_0af2a3ea-da2e-4b99-9486-ce12263a62bf/kube-rbac-proxy/0.log" Jan 21 19:20:10 crc kubenswrapper[4799]: I0121 19:20:10.522382 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-sbtsn_0af2a3ea-da2e-4b99-9486-ce12263a62bf/controller/0.log" Jan 21 19:20:10 crc kubenswrapper[4799]: I0121 19:20:10.645370 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-frr-files/0.log" Jan 21 19:20:10 crc kubenswrapper[4799]: I0121 19:20:10.834460 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-reloader/0.log" Jan 21 19:20:10 crc kubenswrapper[4799]: I0121 19:20:10.866092 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-metrics/0.log" Jan 21 19:20:10 crc 
kubenswrapper[4799]: I0121 19:20:10.866116 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-frr-files/0.log" Jan 21 19:20:10 crc kubenswrapper[4799]: I0121 19:20:10.867651 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-reloader/0.log" Jan 21 19:20:11 crc kubenswrapper[4799]: I0121 19:20:11.025789 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-frr-files/0.log" Jan 21 19:20:11 crc kubenswrapper[4799]: I0121 19:20:11.034042 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-reloader/0.log" Jan 21 19:20:11 crc kubenswrapper[4799]: I0121 19:20:11.056102 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-metrics/0.log" Jan 21 19:20:11 crc kubenswrapper[4799]: I0121 19:20:11.075509 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-metrics/0.log" Jan 21 19:20:11 crc kubenswrapper[4799]: I0121 19:20:11.225395 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-metrics/0.log" Jan 21 19:20:11 crc kubenswrapper[4799]: I0121 19:20:11.225436 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-reloader/0.log" Jan 21 19:20:11 crc kubenswrapper[4799]: I0121 19:20:11.226890 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/cp-frr-files/0.log" Jan 21 19:20:11 crc kubenswrapper[4799]: I0121 19:20:11.268379 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/controller/0.log" Jan 21 19:20:11 crc kubenswrapper[4799]: I0121 19:20:11.413699 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/frr-metrics/0.log" Jan 21 19:20:11 crc kubenswrapper[4799]: I0121 19:20:11.414691 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/kube-rbac-proxy/0.log" Jan 21 19:20:11 crc kubenswrapper[4799]: I0121 19:20:11.466200 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/kube-rbac-proxy-frr/0.log" Jan 21 19:20:11 crc kubenswrapper[4799]: I0121 19:20:11.631290 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/reloader/0.log" Jan 21 19:20:11 crc kubenswrapper[4799]: I0121 19:20:11.710779 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-4n47t_1568add4-52bd-4796-87e0-2d9fc9f92324/frr-k8s-webhook-server/0.log" Jan 21 19:20:11 crc kubenswrapper[4799]: I0121 19:20:11.916863 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-58dfbb9557-5cwxj_7007eeb3-f638-4564-bef1-01c1799f9659/manager/0.log" Jan 21 19:20:12 crc kubenswrapper[4799]: I0121 19:20:12.089194 4799 log.go:25] "Finished parsing log 
file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-766568f764-6v2cv_c911a8ad-608f-480c-83b2-672c420e3091/webhook-server/0.log" Jan 21 19:20:12 crc kubenswrapper[4799]: I0121 19:20:12.223974 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-cdw6h_6c55e902-cf8f-4a8d-ade3-4bd470144d8e/kube-rbac-proxy/0.log" Jan 21 19:20:12 crc kubenswrapper[4799]: I0121 19:20:12.798888 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-cdw6h_6c55e902-cf8f-4a8d-ade3-4bd470144d8e/speaker/0.log" Jan 21 19:20:13 crc kubenswrapper[4799]: I0121 19:20:13.254086 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-74wj8_1f8c16b9-b58d-4bf1-a086-47e9c8339544/frr/0.log" Jan 21 19:20:25 crc kubenswrapper[4799]: I0121 19:20:25.407494 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf_dfb67070-f383-42b8-bb55-1406f6994a95/util/0.log" Jan 21 19:20:25 crc kubenswrapper[4799]: I0121 19:20:25.649517 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf_dfb67070-f383-42b8-bb55-1406f6994a95/pull/0.log" Jan 21 19:20:25 crc kubenswrapper[4799]: I0121 19:20:25.670740 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf_dfb67070-f383-42b8-bb55-1406f6994a95/util/0.log" Jan 21 19:20:25 crc kubenswrapper[4799]: I0121 19:20:25.711476 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf_dfb67070-f383-42b8-bb55-1406f6994a95/pull/0.log" Jan 21 19:20:25 crc kubenswrapper[4799]: I0121 19:20:25.899795 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf_dfb67070-f383-42b8-bb55-1406f6994a95/pull/0.log" Jan 21 19:20:25 crc kubenswrapper[4799]: I0121 19:20:25.913074 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf_dfb67070-f383-42b8-bb55-1406f6994a95/util/0.log" Jan 21 19:20:25 crc kubenswrapper[4799]: I0121 19:20:25.949666 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcbmjgf_dfb67070-f383-42b8-bb55-1406f6994a95/extract/0.log" Jan 21 19:20:26 crc kubenswrapper[4799]: I0121 19:20:26.061190 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc_fd2efc6b-139c-4450-8665-e5d4a013ed30/util/0.log" Jan 21 19:20:26 crc kubenswrapper[4799]: I0121 19:20:26.212257 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc_fd2efc6b-139c-4450-8665-e5d4a013ed30/util/0.log" Jan 21 19:20:26 crc kubenswrapper[4799]: I0121 19:20:26.217687 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc_fd2efc6b-139c-4450-8665-e5d4a013ed30/pull/0.log" Jan 21 19:20:26 crc kubenswrapper[4799]: I0121 19:20:26.227068 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc_fd2efc6b-139c-4450-8665-e5d4a013ed30/pull/0.log" Jan 21 19:20:26 crc kubenswrapper[4799]: I0121 19:20:26.399280 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc_fd2efc6b-139c-4450-8665-e5d4a013ed30/util/0.log" Jan 21 19:20:26 crc kubenswrapper[4799]: I0121 19:20:26.410520 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc_fd2efc6b-139c-4450-8665-e5d4a013ed30/pull/0.log" Jan 21 19:20:26 crc kubenswrapper[4799]: I0121 19:20:26.433225 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713prtbc_fd2efc6b-139c-4450-8665-e5d4a013ed30/extract/0.log" Jan 21 19:20:26 crc kubenswrapper[4799]: I0121 19:20:26.578173 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr_7941bf3d-097c-45f0-a09c-9514ab8f672d/util/0.log" Jan 21 19:20:26 crc kubenswrapper[4799]: I0121 19:20:26.762107 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr_7941bf3d-097c-45f0-a09c-9514ab8f672d/pull/0.log" Jan 21 19:20:26 crc kubenswrapper[4799]: I0121 19:20:26.768682 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr_7941bf3d-097c-45f0-a09c-9514ab8f672d/pull/0.log" Jan 21 19:20:26 crc kubenswrapper[4799]: I0121 19:20:26.777312 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr_7941bf3d-097c-45f0-a09c-9514ab8f672d/util/0.log" Jan 21 19:20:26 crc kubenswrapper[4799]: I0121 19:20:26.934454 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr_7941bf3d-097c-45f0-a09c-9514ab8f672d/util/0.log" Jan 21 19:20:26 crc kubenswrapper[4799]: I0121 19:20:26.942016 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr_7941bf3d-097c-45f0-a09c-9514ab8f672d/pull/0.log" Jan 21 19:20:26 crc kubenswrapper[4799]: I0121 19:20:26.966920 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08ld6gr_7941bf3d-097c-45f0-a09c-9514ab8f672d/extract/0.log" Jan 21 19:20:27 crc kubenswrapper[4799]: I0121 19:20:27.083234 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7r7ss_87d5cfd3-7d03-4ff2-9db4-5228fca84f1c/extract-utilities/0.log" Jan 21 19:20:27 crc kubenswrapper[4799]: I0121 19:20:27.311520 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7r7ss_87d5cfd3-7d03-4ff2-9db4-5228fca84f1c/extract-content/0.log" Jan 21 19:20:27 crc kubenswrapper[4799]: I0121 19:20:27.317619 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7r7ss_87d5cfd3-7d03-4ff2-9db4-5228fca84f1c/extract-content/0.log" Jan 21 19:20:27 crc kubenswrapper[4799]: I0121 19:20:27.348816 
4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7r7ss_87d5cfd3-7d03-4ff2-9db4-5228fca84f1c/extract-utilities/0.log" Jan 21 19:20:27 crc kubenswrapper[4799]: I0121 19:20:27.508429 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7r7ss_87d5cfd3-7d03-4ff2-9db4-5228fca84f1c/extract-content/0.log" Jan 21 19:20:27 crc kubenswrapper[4799]: I0121 19:20:27.525181 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7r7ss_87d5cfd3-7d03-4ff2-9db4-5228fca84f1c/extract-utilities/0.log" Jan 21 19:20:27 crc kubenswrapper[4799]: I0121 19:20:27.709114 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7r7ss_87d5cfd3-7d03-4ff2-9db4-5228fca84f1c/registry-server/0.log" Jan 21 19:20:27 crc kubenswrapper[4799]: I0121 19:20:27.729815 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fr4rq_1da92736-ae07-4de0-b2a0-2f2fec07749a/extract-utilities/0.log" Jan 21 19:20:27 crc kubenswrapper[4799]: I0121 19:20:27.862502 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fr4rq_1da92736-ae07-4de0-b2a0-2f2fec07749a/extract-utilities/0.log" Jan 21 19:20:27 crc kubenswrapper[4799]: I0121 19:20:27.880809 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fr4rq_1da92736-ae07-4de0-b2a0-2f2fec07749a/extract-content/0.log" Jan 21 19:20:27 crc kubenswrapper[4799]: I0121 19:20:27.923816 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fr4rq_1da92736-ae07-4de0-b2a0-2f2fec07749a/extract-content/0.log" Jan 21 19:20:28 crc kubenswrapper[4799]: I0121 19:20:28.075809 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fr4rq_1da92736-ae07-4de0-b2a0-2f2fec07749a/extract-utilities/0.log" Jan 21 19:20:28 crc kubenswrapper[4799]: I0121 19:20:28.131120 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fr4rq_1da92736-ae07-4de0-b2a0-2f2fec07749a/extract-content/0.log" Jan 21 19:20:28 crc kubenswrapper[4799]: I0121 19:20:28.282694 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-g4vrr_1f389163-50cd-4aaa-9b7c-82358ab47826/marketplace-operator/0.log" Jan 21 19:20:28 crc kubenswrapper[4799]: I0121 19:20:28.386485 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f9tnv_5cb24916-faef-4a1c-8e2c-c51d108d915e/extract-utilities/0.log" Jan 21 19:20:28 crc kubenswrapper[4799]: I0121 19:20:28.637737 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f9tnv_5cb24916-faef-4a1c-8e2c-c51d108d915e/extract-utilities/0.log" Jan 21 19:20:28 crc kubenswrapper[4799]: I0121 19:20:28.660615 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f9tnv_5cb24916-faef-4a1c-8e2c-c51d108d915e/extract-content/0.log" Jan 21 19:20:28 crc kubenswrapper[4799]: I0121 19:20:28.782182 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f9tnv_5cb24916-faef-4a1c-8e2c-c51d108d915e/extract-content/0.log" Jan 21 19:20:29 crc kubenswrapper[4799]: I0121 19:20:29.016251 
4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f9tnv_5cb24916-faef-4a1c-8e2c-c51d108d915e/extract-utilities/0.log" Jan 21 19:20:29 crc kubenswrapper[4799]: I0121 19:20:29.039755 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f9tnv_5cb24916-faef-4a1c-8e2c-c51d108d915e/extract-content/0.log" Jan 21 19:20:29 crc kubenswrapper[4799]: I0121 19:20:29.247862 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fr4rq_1da92736-ae07-4de0-b2a0-2f2fec07749a/registry-server/0.log" Jan 21 19:20:29 crc kubenswrapper[4799]: I0121 19:20:29.295544 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p6ls8_57d3c4d8-2186-406a-bac8-d3b062232299/extract-utilities/0.log" Jan 21 19:20:29 crc kubenswrapper[4799]: I0121 19:20:29.298115 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f9tnv_5cb24916-faef-4a1c-8e2c-c51d108d915e/registry-server/0.log" Jan 21 19:20:29 crc kubenswrapper[4799]: I0121 19:20:29.430488 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p6ls8_57d3c4d8-2186-406a-bac8-d3b062232299/extract-utilities/0.log" Jan 21 19:20:29 crc kubenswrapper[4799]: I0121 19:20:29.484497 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p6ls8_57d3c4d8-2186-406a-bac8-d3b062232299/extract-content/0.log" Jan 21 19:20:29 crc kubenswrapper[4799]: I0121 19:20:29.491230 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p6ls8_57d3c4d8-2186-406a-bac8-d3b062232299/extract-content/0.log" Jan 21 19:20:29 crc kubenswrapper[4799]: I0121 19:20:29.684095 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p6ls8_57d3c4d8-2186-406a-bac8-d3b062232299/extract-content/0.log" Jan 21 19:20:29 crc kubenswrapper[4799]: I0121 19:20:29.712991 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p6ls8_57d3c4d8-2186-406a-bac8-d3b062232299/extract-utilities/0.log" Jan 21 19:20:30 crc kubenswrapper[4799]: I0121 19:20:30.758359 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p6ls8_57d3c4d8-2186-406a-bac8-d3b062232299/registry-server/0.log" Jan 21 19:20:42 crc kubenswrapper[4799]: I0121 19:20:42.108436 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-58w9k_d743d591-b616-4e57-8395-ef3565083899/prometheus-operator/0.log" Jan 21 19:20:42 crc kubenswrapper[4799]: I0121 19:20:42.113638 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-686f76cdd5-jrggf_44710bca-2659-43a9-9454-e12123e0c965/prometheus-operator-admission-webhook/0.log" Jan 21 19:20:42 crc kubenswrapper[4799]: I0121 19:20:42.161060 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-686f76cdd5-2t67t_77f855af-53b1-4152-bbff-c818ffa1e32e/prometheus-operator-admission-webhook/0.log" Jan 21 19:20:42 crc kubenswrapper[4799]: I0121 19:20:42.284537 4799 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-w5tlc_3202bb55-0262-452a-9cfe-93088a43c767/perses-operator/0.log" Jan 21 19:20:42 crc kubenswrapper[4799]: I0121 19:20:42.304592 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-xrwhp_d38deaee-a893-47a5-b3d5-c1ea392a894b/operator/0.log" Jan 21 19:20:55 crc kubenswrapper[4799]: I0121 19:20:55.971028 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 19:20:55 crc kubenswrapper[4799]: I0121 19:20:55.971524 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 19:21:25 crc kubenswrapper[4799]: I0121 19:21:25.971453 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 19:21:25 crc kubenswrapper[4799]: I0121 19:21:25.972452 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 19:21:55 crc kubenswrapper[4799]: I0121 19:21:55.970621 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 19:21:55 crc kubenswrapper[4799]: I0121 19:21:55.971485 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 19:21:55 crc kubenswrapper[4799]: I0121 19:21:55.971545 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" Jan 21 19:21:55 crc kubenswrapper[4799]: I0121 19:21:55.972226 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c40775b8a431be9bcc384aad4a591d1cfadc9d5fcac27070146e3f779cf50aab"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 21 19:21:55 crc kubenswrapper[4799]: I0121 19:21:55.972311 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" 
containerID="cri-o://c40775b8a431be9bcc384aad4a591d1cfadc9d5fcac27070146e3f779cf50aab" gracePeriod=600 Jan 21 19:21:56 crc kubenswrapper[4799]: I0121 19:21:56.219374 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="c40775b8a431be9bcc384aad4a591d1cfadc9d5fcac27070146e3f779cf50aab" exitCode=0 Jan 21 19:21:56 crc kubenswrapper[4799]: I0121 19:21:56.220976 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"c40775b8a431be9bcc384aad4a591d1cfadc9d5fcac27070146e3f779cf50aab"} Jan 21 19:21:56 crc kubenswrapper[4799]: I0121 19:21:56.221089 4799 scope.go:117] "RemoveContainer" containerID="8f1ba133f70a5b9f451349bf5092699aaf43247a3719014c90228a2b79e4fb5d" Jan 21 19:21:57 crc kubenswrapper[4799]: I0121 19:21:57.233467 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerStarted","Data":"11aa8d9973ddb478a54f1f299d4722eee03f0b12d1c76e09c2255afd05c89810"} Jan 21 19:22:42 crc kubenswrapper[4799]: I0121 19:22:42.394225 4799 scope.go:117] "RemoveContainer" containerID="a277726e87ee9fb20268c24407df2ef1f780997b6f9b110afe6217ed6821a40a" Jan 21 19:22:50 crc kubenswrapper[4799]: I0121 19:22:50.816862 4799 generic.go:334] "Generic (PLEG): container finished" podID="4f422c5e-e37c-49ba-b445-fd8f178ee3e3" containerID="07bf3e610a46c2af8214f20ad6ffdca5b0aad1ad1188a841b9d36354b530d1a5" exitCode=0 Jan 21 19:22:50 crc kubenswrapper[4799]: I0121 19:22:50.816957 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-g6g7f/must-gather-tvtrx" event={"ID":"4f422c5e-e37c-49ba-b445-fd8f178ee3e3","Type":"ContainerDied","Data":"07bf3e610a46c2af8214f20ad6ffdca5b0aad1ad1188a841b9d36354b530d1a5"} Jan 21 19:22:50 crc kubenswrapper[4799]: I0121 19:22:50.818062 4799 scope.go:117] "RemoveContainer" containerID="07bf3e610a46c2af8214f20ad6ffdca5b0aad1ad1188a841b9d36354b530d1a5" Jan 21 19:22:51 crc kubenswrapper[4799]: I0121 19:22:51.013890 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-g6g7f_must-gather-tvtrx_4f422c5e-e37c-49ba-b445-fd8f178ee3e3/gather/0.log" Jan 21 19:23:04 crc kubenswrapper[4799]: I0121 19:23:04.449198 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-g6g7f/must-gather-tvtrx"] Jan 21 19:23:04 crc kubenswrapper[4799]: I0121 19:23:04.455993 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-g6g7f/must-gather-tvtrx" podUID="4f422c5e-e37c-49ba-b445-fd8f178ee3e3" containerName="copy" containerID="cri-o://37cd0687dcf0248ae11a5eac750b6b8ac6e878750a4e427bac1c61f81c0ece8f" gracePeriod=2 Jan 21 19:23:04 crc kubenswrapper[4799]: I0121 19:23:04.458060 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-g6g7f/must-gather-tvtrx"] Jan 21 19:23:04 crc kubenswrapper[4799]: I0121 19:23:04.967045 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-g6g7f_must-gather-tvtrx_4f422c5e-e37c-49ba-b445-fd8f178ee3e3/copy/0.log" Jan 21 19:23:04 crc kubenswrapper[4799]: I0121 19:23:04.967844 4799 generic.go:334] "Generic (PLEG): container finished" podID="4f422c5e-e37c-49ba-b445-fd8f178ee3e3" containerID="37cd0687dcf0248ae11a5eac750b6b8ac6e878750a4e427bac1c61f81c0ece8f" exitCode=143 Jan 21 
Jan 21 19:23:04 crc kubenswrapper[4799]: I0121 19:23:04.967894 4799 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="701f1b92b1a43a61cd8f770cd5b80351dee275154fb14fb48a8a7cbf930081ad"
Jan 21 19:23:04 crc kubenswrapper[4799]: I0121 19:23:04.987513 4799 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-g6g7f_must-gather-tvtrx_4f422c5e-e37c-49ba-b445-fd8f178ee3e3/copy/0.log"
Jan 21 19:23:04 crc kubenswrapper[4799]: I0121 19:23:04.987912 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-g6g7f/must-gather-tvtrx"
Jan 21 19:23:04 crc kubenswrapper[4799]: I0121 19:23:04.999908 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4f422c5e-e37c-49ba-b445-fd8f178ee3e3-must-gather-output\") pod \"4f422c5e-e37c-49ba-b445-fd8f178ee3e3\" (UID: \"4f422c5e-e37c-49ba-b445-fd8f178ee3e3\") "
Jan 21 19:23:05 crc kubenswrapper[4799]: I0121 19:23:05.000057 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wz666\" (UniqueName: \"kubernetes.io/projected/4f422c5e-e37c-49ba-b445-fd8f178ee3e3-kube-api-access-wz666\") pod \"4f422c5e-e37c-49ba-b445-fd8f178ee3e3\" (UID: \"4f422c5e-e37c-49ba-b445-fd8f178ee3e3\") "
Jan 21 19:23:05 crc kubenswrapper[4799]: I0121 19:23:05.012944 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f422c5e-e37c-49ba-b445-fd8f178ee3e3-kube-api-access-wz666" (OuterVolumeSpecName: "kube-api-access-wz666") pod "4f422c5e-e37c-49ba-b445-fd8f178ee3e3" (UID: "4f422c5e-e37c-49ba-b445-fd8f178ee3e3"). InnerVolumeSpecName "kube-api-access-wz666". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 21 19:23:05 crc kubenswrapper[4799]: I0121 19:23:05.103582 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wz666\" (UniqueName: \"kubernetes.io/projected/4f422c5e-e37c-49ba-b445-fd8f178ee3e3-kube-api-access-wz666\") on node \"crc\" DevicePath \"\""
Jan 21 19:23:05 crc kubenswrapper[4799]: I0121 19:23:05.224688 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f422c5e-e37c-49ba-b445-fd8f178ee3e3-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "4f422c5e-e37c-49ba-b445-fd8f178ee3e3" (UID: "4f422c5e-e37c-49ba-b445-fd8f178ee3e3"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 21 19:23:05 crc kubenswrapper[4799]: I0121 19:23:05.309242 4799 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4f422c5e-e37c-49ba-b445-fd8f178ee3e3-must-gather-output\") on node \"crc\" DevicePath \"\""
Jan 21 19:23:05 crc kubenswrapper[4799]: I0121 19:23:05.980114 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-g6g7f/must-gather-tvtrx"
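[editor's note] The UnmountVolume.TearDown / "Volume detached" pairs above walk the pod's volume directory under /var/lib/kubelet/pods/<podUID>/volumes (the "Cleaned up orphaned pod volumes dir" entry just below prints exactly that path); each volume plugin gets a subdirectory named after its escaped plugin name, with "/" mapped to "~" on disk. A sketch constructing those host paths for the two volume types seen here; the escaping convention is the standard kubelet one, the helper itself is hypothetical.

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// volumeDir builds the host path the kubelet uses for a pod volume:
// /var/lib/kubelet/pods/<podUID>/volumes/<escaped-plugin>/<volumeName>
// Plugin names have their "/" escaped to "~" on disk, so
// kubernetes.io/empty-dir becomes kubernetes.io~empty-dir.
func volumeDir(podUID, plugin, volume string) string {
	escaped := strings.ReplaceAll(plugin, "/", "~")
	return filepath.Join("/var/lib/kubelet/pods", podUID, "volumes", escaped, volume)
}

func main() {
	uid := "4f422c5e-e37c-49ba-b445-fd8f178ee3e3" // the must-gather pod above
	fmt.Println(volumeDir(uid, "kubernetes.io/empty-dir", "must-gather-output"))
	fmt.Println(volumeDir(uid, "kubernetes.io/projected", "kube-api-access-wz666"))
}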
Need to start a new one" pod="openshift-must-gather-g6g7f/must-gather-tvtrx" Jan 21 19:23:06 crc kubenswrapper[4799]: I0121 19:23:06.227057 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f422c5e-e37c-49ba-b445-fd8f178ee3e3" path="/var/lib/kubelet/pods/4f422c5e-e37c-49ba-b445-fd8f178ee3e3/volumes" Jan 21 19:23:42 crc kubenswrapper[4799]: I0121 19:23:42.468881 4799 scope.go:117] "RemoveContainer" containerID="6074dd1f09c3a0aac68048aa7fb66c7bd3bb8f0101f30c080cb2bc11b3134243" Jan 21 19:23:42 crc kubenswrapper[4799]: I0121 19:23:42.496847 4799 scope.go:117] "RemoveContainer" containerID="07bf3e610a46c2af8214f20ad6ffdca5b0aad1ad1188a841b9d36354b530d1a5" Jan 21 19:23:42 crc kubenswrapper[4799]: I0121 19:23:42.584568 4799 scope.go:117] "RemoveContainer" containerID="37cd0687dcf0248ae11a5eac750b6b8ac6e878750a4e427bac1c61f81c0ece8f" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:17.980554 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rjdf5"] Jan 21 19:24:18 crc kubenswrapper[4799]: E0121 19:24:17.982079 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb" containerName="registry-server" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:17.982103 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb" containerName="registry-server" Jan 21 19:24:18 crc kubenswrapper[4799]: E0121 19:24:17.982160 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c05a837c-3d12-486b-994f-303ddfe81b4f" containerName="extract-content" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:17.982173 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c05a837c-3d12-486b-994f-303ddfe81b4f" containerName="extract-content" Jan 21 19:24:18 crc kubenswrapper[4799]: E0121 19:24:17.982197 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c05a837c-3d12-486b-994f-303ddfe81b4f" containerName="extract-utilities" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:17.982209 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c05a837c-3d12-486b-994f-303ddfe81b4f" containerName="extract-utilities" Jan 21 19:24:18 crc kubenswrapper[4799]: E0121 19:24:17.982238 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb" containerName="extract-utilities" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:17.982249 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb" containerName="extract-utilities" Jan 21 19:24:18 crc kubenswrapper[4799]: E0121 19:24:17.982275 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c05a837c-3d12-486b-994f-303ddfe81b4f" containerName="registry-server" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:17.982285 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="c05a837c-3d12-486b-994f-303ddfe81b4f" containerName="registry-server" Jan 21 19:24:18 crc kubenswrapper[4799]: E0121 19:24:17.982304 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f422c5e-e37c-49ba-b445-fd8f178ee3e3" containerName="gather" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:17.982314 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f422c5e-e37c-49ba-b445-fd8f178ee3e3" containerName="gather" Jan 21 19:24:18 crc kubenswrapper[4799]: E0121 19:24:17.982340 4799 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="4f422c5e-e37c-49ba-b445-fd8f178ee3e3" containerName="copy" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:17.982350 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f422c5e-e37c-49ba-b445-fd8f178ee3e3" containerName="copy" Jan 21 19:24:18 crc kubenswrapper[4799]: E0121 19:24:17.982388 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb" containerName="extract-content" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:17.982398 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb" containerName="extract-content" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:17.982735 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="c05a837c-3d12-486b-994f-303ddfe81b4f" containerName="registry-server" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:17.982761 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1aa6b3b-0d23-4f9b-99c5-82c4e67640cb" containerName="registry-server" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:17.982790 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f422c5e-e37c-49ba-b445-fd8f178ee3e3" containerName="copy" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:17.982815 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f422c5e-e37c-49ba-b445-fd8f178ee3e3" containerName="gather" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:17.985043 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rjdf5" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:17.995846 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rjdf5"] Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:18.062289 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d7d1d41-ac0a-4910-902b-6993532a0f6b-utilities\") pod \"redhat-operators-rjdf5\" (UID: \"1d7d1d41-ac0a-4910-902b-6993532a0f6b\") " pod="openshift-marketplace/redhat-operators-rjdf5" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:18.062458 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d7d1d41-ac0a-4910-902b-6993532a0f6b-catalog-content\") pod \"redhat-operators-rjdf5\" (UID: \"1d7d1d41-ac0a-4910-902b-6993532a0f6b\") " pod="openshift-marketplace/redhat-operators-rjdf5" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:18.062616 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfs65\" (UniqueName: \"kubernetes.io/projected/1d7d1d41-ac0a-4910-902b-6993532a0f6b-kube-api-access-kfs65\") pod \"redhat-operators-rjdf5\" (UID: \"1d7d1d41-ac0a-4910-902b-6993532a0f6b\") " pod="openshift-marketplace/redhat-operators-rjdf5" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:18.165445 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d7d1d41-ac0a-4910-902b-6993532a0f6b-utilities\") pod \"redhat-operators-rjdf5\" (UID: \"1d7d1d41-ac0a-4910-902b-6993532a0f6b\") " pod="openshift-marketplace/redhat-operators-rjdf5" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:18.165666 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d7d1d41-ac0a-4910-902b-6993532a0f6b-catalog-content\") pod \"redhat-operators-rjdf5\" (UID: \"1d7d1d41-ac0a-4910-902b-6993532a0f6b\") " pod="openshift-marketplace/redhat-operators-rjdf5" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:18.165868 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfs65\" (UniqueName: \"kubernetes.io/projected/1d7d1d41-ac0a-4910-902b-6993532a0f6b-kube-api-access-kfs65\") pod \"redhat-operators-rjdf5\" (UID: \"1d7d1d41-ac0a-4910-902b-6993532a0f6b\") " pod="openshift-marketplace/redhat-operators-rjdf5" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:18.165940 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d7d1d41-ac0a-4910-902b-6993532a0f6b-utilities\") pod \"redhat-operators-rjdf5\" (UID: \"1d7d1d41-ac0a-4910-902b-6993532a0f6b\") " pod="openshift-marketplace/redhat-operators-rjdf5" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:18.166102 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d7d1d41-ac0a-4910-902b-6993532a0f6b-catalog-content\") pod \"redhat-operators-rjdf5\" (UID: \"1d7d1d41-ac0a-4910-902b-6993532a0f6b\") " pod="openshift-marketplace/redhat-operators-rjdf5" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:18.192562 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfs65\" (UniqueName: \"kubernetes.io/projected/1d7d1d41-ac0a-4910-902b-6993532a0f6b-kube-api-access-kfs65\") pod \"redhat-operators-rjdf5\" (UID: \"1d7d1d41-ac0a-4910-902b-6993532a0f6b\") " pod="openshift-marketplace/redhat-operators-rjdf5" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:18.349356 4799 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rjdf5" Jan 21 19:24:18 crc kubenswrapper[4799]: I0121 19:24:18.859914 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rjdf5"] Jan 21 19:24:19 crc kubenswrapper[4799]: I0121 19:24:19.794934 4799 generic.go:334] "Generic (PLEG): container finished" podID="1d7d1d41-ac0a-4910-902b-6993532a0f6b" containerID="2ca013ed38d2d6d1143e5aa25ac9acc19c3ef92b5e6bf8c8d88271656cb28910" exitCode=0 Jan 21 19:24:19 crc kubenswrapper[4799]: I0121 19:24:19.795213 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rjdf5" event={"ID":"1d7d1d41-ac0a-4910-902b-6993532a0f6b","Type":"ContainerDied","Data":"2ca013ed38d2d6d1143e5aa25ac9acc19c3ef92b5e6bf8c8d88271656cb28910"} Jan 21 19:24:19 crc kubenswrapper[4799]: I0121 19:24:19.795244 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rjdf5" event={"ID":"1d7d1d41-ac0a-4910-902b-6993532a0f6b","Type":"ContainerStarted","Data":"79fccba5d64cec817e2a81f81db849478a31f0e464279a826b3034903be80c66"} Jan 21 19:24:19 crc kubenswrapper[4799]: I0121 19:24:19.800490 4799 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 21 19:24:21 crc kubenswrapper[4799]: I0121 19:24:21.815926 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rjdf5" event={"ID":"1d7d1d41-ac0a-4910-902b-6993532a0f6b","Type":"ContainerStarted","Data":"b66c08e1bc25077ebaf85af081010252a6583d150f0566d44fa55af0426d6b18"} Jan 21 19:24:23 crc kubenswrapper[4799]: I0121 19:24:23.838515 4799 generic.go:334] "Generic (PLEG): container finished" podID="1d7d1d41-ac0a-4910-902b-6993532a0f6b" containerID="b66c08e1bc25077ebaf85af081010252a6583d150f0566d44fa55af0426d6b18" exitCode=0 Jan 21 19:24:23 crc kubenswrapper[4799]: I0121 19:24:23.838648 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rjdf5" event={"ID":"1d7d1d41-ac0a-4910-902b-6993532a0f6b","Type":"ContainerDied","Data":"b66c08e1bc25077ebaf85af081010252a6583d150f0566d44fa55af0426d6b18"} Jan 21 19:24:24 crc kubenswrapper[4799]: I0121 19:24:24.851314 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rjdf5" event={"ID":"1d7d1d41-ac0a-4910-902b-6993532a0f6b","Type":"ContainerStarted","Data":"1ce6bfdc2d2c172bf78ff090f7adbd089535b3e3f3941b50dae16bb71c0201fe"} Jan 21 19:24:24 crc kubenswrapper[4799]: I0121 19:24:24.875457 4799 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rjdf5" podStartSLOduration=3.37318288 podStartE2EDuration="7.875437017s" podCreationTimestamp="2026-01-21 19:24:17 +0000 UTC" firstStartedPulling="2026-01-21 19:24:19.800232535 +0000 UTC m=+6686.426522558" lastFinishedPulling="2026-01-21 19:24:24.302486682 +0000 UTC m=+6690.928776695" observedRunningTime="2026-01-21 19:24:24.868530531 +0000 UTC m=+6691.494820594" watchObservedRunningTime="2026-01-21 19:24:24.875437017 +0000 UTC m=+6691.501727040" Jan 21 19:24:25 crc kubenswrapper[4799]: I0121 19:24:25.970635 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 19:24:25 crc kubenswrapper[4799]: I0121 
Jan 21 19:24:28 crc kubenswrapper[4799]: I0121 19:24:28.349961 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rjdf5"
Jan 21 19:24:28 crc kubenswrapper[4799]: I0121 19:24:28.350415 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rjdf5"
Jan 21 19:24:29 crc kubenswrapper[4799]: I0121 19:24:29.401978 4799 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rjdf5" podUID="1d7d1d41-ac0a-4910-902b-6993532a0f6b" containerName="registry-server" probeResult="failure" output=<
Jan 21 19:24:29 crc kubenswrapper[4799]: timeout: failed to connect service ":50051" within 1s
Jan 21 19:24:29 crc kubenswrapper[4799]: >
Jan 21 19:24:38 crc kubenswrapper[4799]: I0121 19:24:38.426935 4799 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rjdf5"
Jan 21 19:24:38 crc kubenswrapper[4799]: I0121 19:24:38.513016 4799 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rjdf5"
Jan 21 19:24:38 crc kubenswrapper[4799]: I0121 19:24:38.682224 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rjdf5"]
Jan 21 19:24:40 crc kubenswrapper[4799]: I0121 19:24:40.026531 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rjdf5" podUID="1d7d1d41-ac0a-4910-902b-6993532a0f6b" containerName="registry-server" containerID="cri-o://1ce6bfdc2d2c172bf78ff090f7adbd089535b3e3f3941b50dae16bb71c0201fe" gracePeriod=2
Jan 21 19:24:40 crc kubenswrapper[4799]: I0121 19:24:40.491612 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rjdf5"
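[editor's note] The registry-server startup probe output above ("timeout: failed to connect service \":50051\" within 1s") is the failure message of a gRPC health-check client run against the catalog's gRPC port; the kubelet keeps the pod unready until the probe reports started at 19:24:38. As a hedged approximation (the real probe is presumably grpc_health_probe or an equivalent exec probe; a bare TCP dial stands in for the gRPC handshake here):

package main

import (
	"fmt"
	"net"
	"time"
)

// checkStarted approximates the startup probe: can we reach the gRPC
// port within 1s? The real probe also completes a gRPC health RPC;
// a TCP dial is the simplest stand-in for this sketch.
func checkStarted(addr string) error {
	conn, err := net.DialTimeout("tcp", addr, time.Second)
	if err != nil {
		return fmt.Errorf("timeout: failed to connect service %q within 1s", addr)
	}
	return conn.Close()
}

func main() {
	if err := checkStarted(":50051"); err != nil {
		fmt.Println(err) // mirrors the probe output seen in the log
	}
}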
Need to start a new one" pod="openshift-marketplace/redhat-operators-rjdf5" Jan 21 19:24:40 crc kubenswrapper[4799]: I0121 19:24:40.603960 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d7d1d41-ac0a-4910-902b-6993532a0f6b-utilities\") pod \"1d7d1d41-ac0a-4910-902b-6993532a0f6b\" (UID: \"1d7d1d41-ac0a-4910-902b-6993532a0f6b\") " Jan 21 19:24:40 crc kubenswrapper[4799]: I0121 19:24:40.604346 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfs65\" (UniqueName: \"kubernetes.io/projected/1d7d1d41-ac0a-4910-902b-6993532a0f6b-kube-api-access-kfs65\") pod \"1d7d1d41-ac0a-4910-902b-6993532a0f6b\" (UID: \"1d7d1d41-ac0a-4910-902b-6993532a0f6b\") " Jan 21 19:24:40 crc kubenswrapper[4799]: I0121 19:24:40.604539 4799 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d7d1d41-ac0a-4910-902b-6993532a0f6b-catalog-content\") pod \"1d7d1d41-ac0a-4910-902b-6993532a0f6b\" (UID: \"1d7d1d41-ac0a-4910-902b-6993532a0f6b\") " Jan 21 19:24:40 crc kubenswrapper[4799]: I0121 19:24:40.604918 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d7d1d41-ac0a-4910-902b-6993532a0f6b-utilities" (OuterVolumeSpecName: "utilities") pod "1d7d1d41-ac0a-4910-902b-6993532a0f6b" (UID: "1d7d1d41-ac0a-4910-902b-6993532a0f6b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:24:40 crc kubenswrapper[4799]: I0121 19:24:40.605333 4799 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d7d1d41-ac0a-4910-902b-6993532a0f6b-utilities\") on node \"crc\" DevicePath \"\"" Jan 21 19:24:40 crc kubenswrapper[4799]: I0121 19:24:40.611440 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d7d1d41-ac0a-4910-902b-6993532a0f6b-kube-api-access-kfs65" (OuterVolumeSpecName: "kube-api-access-kfs65") pod "1d7d1d41-ac0a-4910-902b-6993532a0f6b" (UID: "1d7d1d41-ac0a-4910-902b-6993532a0f6b"). InnerVolumeSpecName "kube-api-access-kfs65". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 21 19:24:40 crc kubenswrapper[4799]: I0121 19:24:40.706946 4799 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfs65\" (UniqueName: \"kubernetes.io/projected/1d7d1d41-ac0a-4910-902b-6993532a0f6b-kube-api-access-kfs65\") on node \"crc\" DevicePath \"\"" Jan 21 19:24:40 crc kubenswrapper[4799]: I0121 19:24:40.739710 4799 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d7d1d41-ac0a-4910-902b-6993532a0f6b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d7d1d41-ac0a-4910-902b-6993532a0f6b" (UID: "1d7d1d41-ac0a-4910-902b-6993532a0f6b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 21 19:24:40 crc kubenswrapper[4799]: I0121 19:24:40.809549 4799 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d7d1d41-ac0a-4910-902b-6993532a0f6b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 21 19:24:41 crc kubenswrapper[4799]: I0121 19:24:41.042259 4799 generic.go:334] "Generic (PLEG): container finished" podID="1d7d1d41-ac0a-4910-902b-6993532a0f6b" containerID="1ce6bfdc2d2c172bf78ff090f7adbd089535b3e3f3941b50dae16bb71c0201fe" exitCode=0 Jan 21 19:24:41 crc kubenswrapper[4799]: I0121 19:24:41.042347 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rjdf5" event={"ID":"1d7d1d41-ac0a-4910-902b-6993532a0f6b","Type":"ContainerDied","Data":"1ce6bfdc2d2c172bf78ff090f7adbd089535b3e3f3941b50dae16bb71c0201fe"} Jan 21 19:24:41 crc kubenswrapper[4799]: I0121 19:24:41.042385 4799 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rjdf5" Jan 21 19:24:41 crc kubenswrapper[4799]: I0121 19:24:41.043958 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rjdf5" event={"ID":"1d7d1d41-ac0a-4910-902b-6993532a0f6b","Type":"ContainerDied","Data":"79fccba5d64cec817e2a81f81db849478a31f0e464279a826b3034903be80c66"} Jan 21 19:24:41 crc kubenswrapper[4799]: I0121 19:24:41.044076 4799 scope.go:117] "RemoveContainer" containerID="1ce6bfdc2d2c172bf78ff090f7adbd089535b3e3f3941b50dae16bb71c0201fe" Jan 21 19:24:41 crc kubenswrapper[4799]: I0121 19:24:41.091069 4799 scope.go:117] "RemoveContainer" containerID="b66c08e1bc25077ebaf85af081010252a6583d150f0566d44fa55af0426d6b18" Jan 21 19:24:41 crc kubenswrapper[4799]: I0121 19:24:41.095984 4799 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rjdf5"] Jan 21 19:24:41 crc kubenswrapper[4799]: I0121 19:24:41.107670 4799 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rjdf5"] Jan 21 19:24:41 crc kubenswrapper[4799]: I0121 19:24:41.124351 4799 scope.go:117] "RemoveContainer" containerID="2ca013ed38d2d6d1143e5aa25ac9acc19c3ef92b5e6bf8c8d88271656cb28910" Jan 21 19:24:41 crc kubenswrapper[4799]: I0121 19:24:41.167232 4799 scope.go:117] "RemoveContainer" containerID="1ce6bfdc2d2c172bf78ff090f7adbd089535b3e3f3941b50dae16bb71c0201fe" Jan 21 19:24:41 crc kubenswrapper[4799]: E0121 19:24:41.168063 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ce6bfdc2d2c172bf78ff090f7adbd089535b3e3f3941b50dae16bb71c0201fe\": container with ID starting with 1ce6bfdc2d2c172bf78ff090f7adbd089535b3e3f3941b50dae16bb71c0201fe not found: ID does not exist" containerID="1ce6bfdc2d2c172bf78ff090f7adbd089535b3e3f3941b50dae16bb71c0201fe" Jan 21 19:24:41 crc kubenswrapper[4799]: I0121 19:24:41.168161 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ce6bfdc2d2c172bf78ff090f7adbd089535b3e3f3941b50dae16bb71c0201fe"} err="failed to get container status \"1ce6bfdc2d2c172bf78ff090f7adbd089535b3e3f3941b50dae16bb71c0201fe\": rpc error: code = NotFound desc = could not find container \"1ce6bfdc2d2c172bf78ff090f7adbd089535b3e3f3941b50dae16bb71c0201fe\": container with ID starting with 1ce6bfdc2d2c172bf78ff090f7adbd089535b3e3f3941b50dae16bb71c0201fe not found: ID does not exist" Jan 21 19:24:41 crc 
kubenswrapper[4799]: I0121 19:24:41.168208 4799 scope.go:117] "RemoveContainer" containerID="b66c08e1bc25077ebaf85af081010252a6583d150f0566d44fa55af0426d6b18" Jan 21 19:24:41 crc kubenswrapper[4799]: E0121 19:24:41.168846 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b66c08e1bc25077ebaf85af081010252a6583d150f0566d44fa55af0426d6b18\": container with ID starting with b66c08e1bc25077ebaf85af081010252a6583d150f0566d44fa55af0426d6b18 not found: ID does not exist" containerID="b66c08e1bc25077ebaf85af081010252a6583d150f0566d44fa55af0426d6b18" Jan 21 19:24:41 crc kubenswrapper[4799]: I0121 19:24:41.168889 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b66c08e1bc25077ebaf85af081010252a6583d150f0566d44fa55af0426d6b18"} err="failed to get container status \"b66c08e1bc25077ebaf85af081010252a6583d150f0566d44fa55af0426d6b18\": rpc error: code = NotFound desc = could not find container \"b66c08e1bc25077ebaf85af081010252a6583d150f0566d44fa55af0426d6b18\": container with ID starting with b66c08e1bc25077ebaf85af081010252a6583d150f0566d44fa55af0426d6b18 not found: ID does not exist" Jan 21 19:24:41 crc kubenswrapper[4799]: I0121 19:24:41.168916 4799 scope.go:117] "RemoveContainer" containerID="2ca013ed38d2d6d1143e5aa25ac9acc19c3ef92b5e6bf8c8d88271656cb28910" Jan 21 19:24:41 crc kubenswrapper[4799]: E0121 19:24:41.169256 4799 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ca013ed38d2d6d1143e5aa25ac9acc19c3ef92b5e6bf8c8d88271656cb28910\": container with ID starting with 2ca013ed38d2d6d1143e5aa25ac9acc19c3ef92b5e6bf8c8d88271656cb28910 not found: ID does not exist" containerID="2ca013ed38d2d6d1143e5aa25ac9acc19c3ef92b5e6bf8c8d88271656cb28910" Jan 21 19:24:41 crc kubenswrapper[4799]: I0121 19:24:41.169295 4799 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ca013ed38d2d6d1143e5aa25ac9acc19c3ef92b5e6bf8c8d88271656cb28910"} err="failed to get container status \"2ca013ed38d2d6d1143e5aa25ac9acc19c3ef92b5e6bf8c8d88271656cb28910\": rpc error: code = NotFound desc = could not find container \"2ca013ed38d2d6d1143e5aa25ac9acc19c3ef92b5e6bf8c8d88271656cb28910\": container with ID starting with 2ca013ed38d2d6d1143e5aa25ac9acc19c3ef92b5e6bf8c8d88271656cb28910 not found: ID does not exist" Jan 21 19:24:42 crc kubenswrapper[4799]: I0121 19:24:42.215661 4799 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d7d1d41-ac0a-4910-902b-6993532a0f6b" path="/var/lib/kubelet/pods/1d7d1d41-ac0a-4910-902b-6993532a0f6b/volumes" Jan 21 19:24:55 crc kubenswrapper[4799]: I0121 19:24:55.971352 4799 patch_prober.go:28] interesting pod/machine-config-daemon-snc2s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 21 19:24:55 crc kubenswrapper[4799]: I0121 19:24:55.972052 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 21 19:25:25 crc kubenswrapper[4799]: I0121 19:25:25.971370 4799 patch_prober.go:28] interesting 
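[editor's note] The RemoveContainer / "ContainerStatus from runtime service failed ... NotFound" / "DeleteContainer returned error" triples above are benign: by the time the cleanup retried, CRI-O had already removed the container, and a NotFound from the runtime is effectively "already deleted". A sketch of that idempotent-delete pattern using gRPC status codes (the wrapper and its names are illustrative, not kubelet code):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer is a hypothetical wrapper: delete via the runtime,
// but treat gRPC NotFound as "already removed" rather than a failure.
func removeContainer(id string, runtimeDelete func(string) error) error {
	if err := runtimeDelete(id); err != nil {
		if status.Code(err) == codes.NotFound {
			fmt.Printf("container %s already gone, nothing to do\n", id)
			return nil
		}
		return err
	}
	return nil
}

func main() {
	// Simulate the runtime answering NotFound, as in the log above.
	alreadyGone := func(id string) error {
		return status.Errorf(codes.NotFound, "could not find container %q", id)
	}
	_ = removeContainer("1ce6bfdc2d2c", alreadyGone)
}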
Jan 21 19:25:25 crc kubenswrapper[4799]: I0121 19:25:25.972202 4799 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 21 19:25:25 crc kubenswrapper[4799]: I0121 19:25:25.972397 4799 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-snc2s"
Jan 21 19:25:25 crc kubenswrapper[4799]: I0121 19:25:25.973976 4799 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"11aa8d9973ddb478a54f1f299d4722eee03f0b12d1c76e09c2255afd05c89810"} pod="openshift-machine-config-operator/machine-config-daemon-snc2s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 21 19:25:25 crc kubenswrapper[4799]: I0121 19:25:25.974129 4799 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerName="machine-config-daemon" containerID="cri-o://11aa8d9973ddb478a54f1f299d4722eee03f0b12d1c76e09c2255afd05c89810" gracePeriod=600
Jan 21 19:25:26 crc kubenswrapper[4799]: E0121 19:25:26.103730 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 19:25:26 crc kubenswrapper[4799]: I0121 19:25:26.577431 4799 generic.go:334] "Generic (PLEG): container finished" podID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" containerID="11aa8d9973ddb478a54f1f299d4722eee03f0b12d1c76e09c2255afd05c89810" exitCode=0
Jan 21 19:25:26 crc kubenswrapper[4799]: I0121 19:25:26.577539 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" event={"ID":"3a9a6c57-0a82-4115-b895-c414b0cc6a3b","Type":"ContainerDied","Data":"11aa8d9973ddb478a54f1f299d4722eee03f0b12d1c76e09c2255afd05c89810"}
Jan 21 19:25:26 crc kubenswrapper[4799]: I0121 19:25:26.577647 4799 scope.go:117] "RemoveContainer" containerID="c40775b8a431be9bcc384aad4a591d1cfadc9d5fcac27070146e3f779cf50aab"
Jan 21 19:25:26 crc kubenswrapper[4799]: I0121 19:25:26.579648 4799 scope.go:117] "RemoveContainer" containerID="11aa8d9973ddb478a54f1f299d4722eee03f0b12d1c76e09c2255afd05c89810"
Jan 21 19:25:26 crc kubenswrapper[4799]: E0121 19:25:26.580107 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 19:25:37 crc kubenswrapper[4799]: I0121 19:25:37.205164 4799 scope.go:117] "RemoveContainer" containerID="11aa8d9973ddb478a54f1f299d4722eee03f0b12d1c76e09c2255afd05c89810"
Jan 21 19:25:37 crc kubenswrapper[4799]: E0121 19:25:37.206173 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 19:25:50 crc kubenswrapper[4799]: I0121 19:25:50.212824 4799 scope.go:117] "RemoveContainer" containerID="11aa8d9973ddb478a54f1f299d4722eee03f0b12d1c76e09c2255afd05c89810"
Jan 21 19:25:50 crc kubenswrapper[4799]: E0121 19:25:50.213978 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 19:26:03 crc kubenswrapper[4799]: I0121 19:26:03.205930 4799 scope.go:117] "RemoveContainer" containerID="11aa8d9973ddb478a54f1f299d4722eee03f0b12d1c76e09c2255afd05c89810"
Jan 21 19:26:03 crc kubenswrapper[4799]: E0121 19:26:03.206803 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 19:26:18 crc kubenswrapper[4799]: I0121 19:26:18.205813 4799 scope.go:117] "RemoveContainer" containerID="11aa8d9973ddb478a54f1f299d4722eee03f0b12d1c76e09c2255afd05c89810"
Jan 21 19:26:18 crc kubenswrapper[4799]: E0121 19:26:18.208352 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 19:26:30 crc kubenswrapper[4799]: I0121 19:26:30.205451 4799 scope.go:117] "RemoveContainer" containerID="11aa8d9973ddb478a54f1f299d4722eee03f0b12d1c76e09c2255afd05c89810"
Jan 21 19:26:30 crc kubenswrapper[4799]: E0121 19:26:30.206276 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b"
Jan 21 19:26:43 crc kubenswrapper[4799]: I0121 19:26:43.205994 4799 scope.go:117] "RemoveContainer" containerID="11aa8d9973ddb478a54f1f299d4722eee03f0b12d1c76e09c2255afd05c89810"
scope.go:117] "RemoveContainer" containerID="11aa8d9973ddb478a54f1f299d4722eee03f0b12d1c76e09c2255afd05c89810" Jan 21 19:26:43 crc kubenswrapper[4799]: E0121 19:26:43.206927 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:26:58 crc kubenswrapper[4799]: I0121 19:26:58.206680 4799 scope.go:117] "RemoveContainer" containerID="11aa8d9973ddb478a54f1f299d4722eee03f0b12d1c76e09c2255afd05c89810" Jan 21 19:26:58 crc kubenswrapper[4799]: E0121 19:26:58.209903 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:27:12 crc kubenswrapper[4799]: I0121 19:27:12.205029 4799 scope.go:117] "RemoveContainer" containerID="11aa8d9973ddb478a54f1f299d4722eee03f0b12d1c76e09c2255afd05c89810" Jan 21 19:27:12 crc kubenswrapper[4799]: E0121 19:27:12.206005 4799 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-snc2s_openshift-machine-config-operator(3a9a6c57-0a82-4115-b895-c414b0cc6a3b)\"" pod="openshift-machine-config-operator/machine-config-daemon-snc2s" podUID="3a9a6c57-0a82-4115-b895-c414b0cc6a3b" Jan 21 19:27:18 crc kubenswrapper[4799]: I0121 19:27:18.811961 4799 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xj4hd"] Jan 21 19:27:18 crc kubenswrapper[4799]: E0121 19:27:18.812956 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d7d1d41-ac0a-4910-902b-6993532a0f6b" containerName="extract-utilities" Jan 21 19:27:18 crc kubenswrapper[4799]: I0121 19:27:18.812970 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d7d1d41-ac0a-4910-902b-6993532a0f6b" containerName="extract-utilities" Jan 21 19:27:18 crc kubenswrapper[4799]: E0121 19:27:18.813006 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d7d1d41-ac0a-4910-902b-6993532a0f6b" containerName="registry-server" Jan 21 19:27:18 crc kubenswrapper[4799]: I0121 19:27:18.813019 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d7d1d41-ac0a-4910-902b-6993532a0f6b" containerName="registry-server" Jan 21 19:27:18 crc kubenswrapper[4799]: E0121 19:27:18.813039 4799 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d7d1d41-ac0a-4910-902b-6993532a0f6b" containerName="extract-content" Jan 21 19:27:18 crc kubenswrapper[4799]: I0121 19:27:18.813046 4799 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d7d1d41-ac0a-4910-902b-6993532a0f6b" containerName="extract-content" Jan 21 19:27:18 crc kubenswrapper[4799]: I0121 19:27:18.813259 4799 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d7d1d41-ac0a-4910-902b-6993532a0f6b" containerName="registry-server" Jan 21 19:27:18 crc 
Jan 21 19:27:18 crc kubenswrapper[4799]: I0121 19:27:18.814788 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xj4hd"
Jan 21 19:27:18 crc kubenswrapper[4799]: I0121 19:27:18.833759 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xj4hd"]
Jan 21 19:27:18 crc kubenswrapper[4799]: I0121 19:27:18.974693 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/842f52b3-0ffe-4883-8299-1e28ce15213e-utilities\") pod \"redhat-marketplace-xj4hd\" (UID: \"842f52b3-0ffe-4883-8299-1e28ce15213e\") " pod="openshift-marketplace/redhat-marketplace-xj4hd"
Jan 21 19:27:18 crc kubenswrapper[4799]: I0121 19:27:18.974992 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4p4gg\" (UniqueName: \"kubernetes.io/projected/842f52b3-0ffe-4883-8299-1e28ce15213e-kube-api-access-4p4gg\") pod \"redhat-marketplace-xj4hd\" (UID: \"842f52b3-0ffe-4883-8299-1e28ce15213e\") " pod="openshift-marketplace/redhat-marketplace-xj4hd"
Jan 21 19:27:18 crc kubenswrapper[4799]: I0121 19:27:18.975202 4799 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/842f52b3-0ffe-4883-8299-1e28ce15213e-catalog-content\") pod \"redhat-marketplace-xj4hd\" (UID: \"842f52b3-0ffe-4883-8299-1e28ce15213e\") " pod="openshift-marketplace/redhat-marketplace-xj4hd"
Jan 21 19:27:19 crc kubenswrapper[4799]: I0121 19:27:19.077463 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/842f52b3-0ffe-4883-8299-1e28ce15213e-catalog-content\") pod \"redhat-marketplace-xj4hd\" (UID: \"842f52b3-0ffe-4883-8299-1e28ce15213e\") " pod="openshift-marketplace/redhat-marketplace-xj4hd"
Jan 21 19:27:19 crc kubenswrapper[4799]: I0121 19:27:19.077707 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/842f52b3-0ffe-4883-8299-1e28ce15213e-utilities\") pod \"redhat-marketplace-xj4hd\" (UID: \"842f52b3-0ffe-4883-8299-1e28ce15213e\") " pod="openshift-marketplace/redhat-marketplace-xj4hd"
Jan 21 19:27:19 crc kubenswrapper[4799]: I0121 19:27:19.077749 4799 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4p4gg\" (UniqueName: \"kubernetes.io/projected/842f52b3-0ffe-4883-8299-1e28ce15213e-kube-api-access-4p4gg\") pod \"redhat-marketplace-xj4hd\" (UID: \"842f52b3-0ffe-4883-8299-1e28ce15213e\") " pod="openshift-marketplace/redhat-marketplace-xj4hd"
Jan 21 19:27:19 crc kubenswrapper[4799]: I0121 19:27:19.078071 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/842f52b3-0ffe-4883-8299-1e28ce15213e-catalog-content\") pod \"redhat-marketplace-xj4hd\" (UID: \"842f52b3-0ffe-4883-8299-1e28ce15213e\") " pod="openshift-marketplace/redhat-marketplace-xj4hd"
Jan 21 19:27:19 crc kubenswrapper[4799]: I0121 19:27:19.078580 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/842f52b3-0ffe-4883-8299-1e28ce15213e-utilities\") pod \"redhat-marketplace-xj4hd\" (UID: \"842f52b3-0ffe-4883-8299-1e28ce15213e\") " pod="openshift-marketplace/redhat-marketplace-xj4hd"
Jan 21 19:27:19 crc kubenswrapper[4799]: I0121 19:27:19.101892 4799 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4p4gg\" (UniqueName: \"kubernetes.io/projected/842f52b3-0ffe-4883-8299-1e28ce15213e-kube-api-access-4p4gg\") pod \"redhat-marketplace-xj4hd\" (UID: \"842f52b3-0ffe-4883-8299-1e28ce15213e\") " pod="openshift-marketplace/redhat-marketplace-xj4hd"
Jan 21 19:27:19 crc kubenswrapper[4799]: I0121 19:27:19.155940 4799 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xj4hd"
Jan 21 19:27:19 crc kubenswrapper[4799]: I0121 19:27:19.712932 4799 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xj4hd"]
Jan 21 19:27:19 crc kubenswrapper[4799]: I0121 19:27:19.880697 4799 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xj4hd" event={"ID":"842f52b3-0ffe-4883-8299-1e28ce15213e","Type":"ContainerStarted","Data":"c6430865da96b2068f325837b74f85195023c15970d7535a09ce05b31ad6b87b"}